var/home/core/zuul-output/logs/kubelet.log
Oct 01 06:16:37 crc systemd[1]: Starting Kubernetes Kubelet... Oct 01 06:16:37 crc restorecon[4663]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c225,c458 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 06:16:37 crc 
restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 01 06:16:37 crc 
restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc 
restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc 
restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 06:16:37 
crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 
06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:37 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 
06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 01 06:16:38 crc 
restorecon[4663]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 
06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 
06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc 
restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 06:16:38 crc restorecon[4663]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Oct 01 06:16:38 crc restorecon[4663]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Oct 01 06:16:39 crc kubenswrapper[4747]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 01 06:16:39 crc kubenswrapper[4747]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Oct 01 06:16:39 crc kubenswrapper[4747]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 01 06:16:39 crc kubenswrapper[4747]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Oct 01 06:16:39 crc kubenswrapper[4747]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Oct 01 06:16:39 crc kubenswrapper[4747]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.033799 4747 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.040964 4747 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.040996 4747 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041007 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041017 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041027 4747 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041036 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041044 4747 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041052 4747 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041060 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041067 4747 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041075 4747 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041083 4747 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041091 4747 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041099 4747 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041107 4747 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041115 4747 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041123 4747 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041133 4747 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041143 4747 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041151 4747 feature_gate.go:330] unrecognized feature gate: Example
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041158 4747 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041181 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041189 4747 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041196 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041204 4747 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041211 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041219 4747 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041227 4747 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041235 4747 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041243 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041250 4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041260 4747 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041271 4747 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041281 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041290 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041299 4747 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041308 4747 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041318 4747 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041326 4747 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041334 4747 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041342 4747 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041352 4747 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041360 4747 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041368 4747 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041376 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041383 4747 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041392 4747 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041400 4747 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041408 4747 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041416 4747 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041424 4747 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041432 4747 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041440 4747 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041448 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041456 4747 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041463 4747 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041471 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041479 4747 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 01
06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041486 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041494 4747 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041502 4747 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041509 4747 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041517 4747 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041524 4747 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041532 4747 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041540 4747 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041547 4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041555 4747 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041573 4747 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041583 4747 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.041593 4747 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.041739 4747 flags.go:64] FLAG: --address="0.0.0.0" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.041796 4747 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.041811 4747 flags.go:64] FLAG: --anonymous-auth="true" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.041822 4747 flags.go:64] FLAG: --application-metrics-count-limit="100" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.041834 4747 flags.go:64] FLAG: --authentication-token-webhook="false" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.041843 4747 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.041854 4747 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.041865 4747 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.041875 4747 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.041884 4747 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.041894 4747 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.041904 4747 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.041913 4747 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Oct 01 06:16:39 crc 
kubenswrapper[4747]: I1001 06:16:39.041922 4747 flags.go:64] FLAG: --cgroup-root="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.041931 4747 flags.go:64] FLAG: --cgroups-per-qos="true" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.041940 4747 flags.go:64] FLAG: --client-ca-file="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.041948 4747 flags.go:64] FLAG: --cloud-config="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.041957 4747 flags.go:64] FLAG: --cloud-provider="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.041965 4747 flags.go:64] FLAG: --cluster-dns="[]" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.041988 4747 flags.go:64] FLAG: --cluster-domain="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.041998 4747 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042007 4747 flags.go:64] FLAG: --config-dir="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042016 4747 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042025 4747 flags.go:64] FLAG: --container-log-max-files="5" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042035 4747 flags.go:64] FLAG: --container-log-max-size="10Mi" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042046 4747 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042054 4747 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042064 4747 flags.go:64] FLAG: --containerd-namespace="k8s.io" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042073 4747 flags.go:64] FLAG: --contention-profiling="false" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042081 4747 flags.go:64] FLAG: --cpu-cfs-quota="true" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042090 4747 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042101 4747 flags.go:64] FLAG: --cpu-manager-policy="none" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042110 4747 flags.go:64] FLAG: --cpu-manager-policy-options="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042125 4747 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042134 4747 flags.go:64] FLAG: --enable-controller-attach-detach="true" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042143 4747 flags.go:64] FLAG: --enable-debugging-handlers="true" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042151 4747 flags.go:64] FLAG: --enable-load-reader="false" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042161 4747 flags.go:64] FLAG: --enable-server="true" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042170 4747 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042188 4747 flags.go:64] FLAG: --event-burst="100" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042197 4747 flags.go:64] FLAG: --event-qps="50" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042206 4747 flags.go:64] FLAG: --event-storage-age-limit="default=0" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042215 4747 flags.go:64] FLAG: --event-storage-event-limit="default=0" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 
06:16:39.042224 4747 flags.go:64] FLAG: --eviction-hard="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042234 4747 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042243 4747 flags.go:64] FLAG: --eviction-minimum-reclaim="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042252 4747 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042261 4747 flags.go:64] FLAG: --eviction-soft="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042270 4747 flags.go:64] FLAG: --eviction-soft-grace-period="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042278 4747 flags.go:64] FLAG: --exit-on-lock-contention="false" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042287 4747 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042296 4747 flags.go:64] FLAG: --experimental-mounter-path="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042305 4747 flags.go:64] FLAG: --fail-cgroupv1="false" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042314 4747 flags.go:64] FLAG: --fail-swap-on="true" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042323 4747 flags.go:64] FLAG: --feature-gates="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042334 4747 flags.go:64] FLAG: --file-check-frequency="20s" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042343 4747 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042352 4747 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042361 4747 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042370 4747 flags.go:64] FLAG: --healthz-port="10248" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042379 4747 flags.go:64] FLAG: --help="false" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042389 4747 flags.go:64] FLAG: --hostname-override="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042397 4747 flags.go:64] FLAG: --housekeeping-interval="10s" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042407 4747 flags.go:64] FLAG: --http-check-frequency="20s" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042415 4747 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042425 4747 flags.go:64] FLAG: --image-credential-provider-config="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042433 4747 flags.go:64] FLAG: --image-gc-high-threshold="85" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042442 4747 flags.go:64] FLAG: --image-gc-low-threshold="80" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042450 4747 flags.go:64] FLAG: --image-service-endpoint="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042459 4747 flags.go:64] FLAG: --kernel-memcg-notification="false" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042468 4747 flags.go:64] FLAG: --kube-api-burst="100" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042477 4747 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042486 4747 flags.go:64] FLAG: --kube-api-qps="50" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042496 
4747 flags.go:64] FLAG: --kube-reserved="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042506 4747 flags.go:64] FLAG: --kube-reserved-cgroup="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042515 4747 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042524 4747 flags.go:64] FLAG: --kubelet-cgroups="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042533 4747 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042542 4747 flags.go:64] FLAG: --lock-file="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042550 4747 flags.go:64] FLAG: --log-cadvisor-usage="false" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042559 4747 flags.go:64] FLAG: --log-flush-frequency="5s" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042568 4747 flags.go:64] FLAG: --log-json-info-buffer-size="0" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042581 4747 flags.go:64] FLAG: --log-json-split-stream="false" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042590 4747 flags.go:64] FLAG: --log-text-info-buffer-size="0" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042599 4747 flags.go:64] FLAG: --log-text-split-stream="false" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042608 4747 flags.go:64] FLAG: --logging-format="text" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042616 4747 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042626 4747 flags.go:64] FLAG: --make-iptables-util-chains="true" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042635 4747 flags.go:64] FLAG: --manifest-url="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042643 4747 flags.go:64] FLAG: --manifest-url-header="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042655 4747 flags.go:64] FLAG: --max-housekeeping-interval="15s" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042664 4747 flags.go:64] FLAG: --max-open-files="1000000" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042674 4747 flags.go:64] FLAG: --max-pods="110" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042684 4747 flags.go:64] FLAG: --maximum-dead-containers="-1" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042692 4747 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042701 4747 flags.go:64] FLAG: --memory-manager-policy="None" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042710 4747 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042719 4747 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042728 4747 flags.go:64] FLAG: --node-ip="192.168.126.11" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042737 4747 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042783 4747 flags.go:64] FLAG: --node-status-max-images="50" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042793 4747 flags.go:64] FLAG: --node-status-update-frequency="10s" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042802 4747 flags.go:64] FLAG: --oom-score-adj="-999" 
Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042812 4747 flags.go:64] FLAG: --pod-cidr="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042822 4747 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042835 4747 flags.go:64] FLAG: --pod-manifest-path="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042844 4747 flags.go:64] FLAG: --pod-max-pids="-1" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042853 4747 flags.go:64] FLAG: --pods-per-core="0" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042862 4747 flags.go:64] FLAG: --port="10250" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042872 4747 flags.go:64] FLAG: --protect-kernel-defaults="false" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042881 4747 flags.go:64] FLAG: --provider-id="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042890 4747 flags.go:64] FLAG: --qos-reserved="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042899 4747 flags.go:64] FLAG: --read-only-port="10255" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042907 4747 flags.go:64] FLAG: --register-node="true" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042917 4747 flags.go:64] FLAG: --register-schedulable="true" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042926 4747 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042940 4747 flags.go:64] FLAG: --registry-burst="10" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042948 4747 flags.go:64] FLAG: --registry-qps="5" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042957 4747 flags.go:64] FLAG: --reserved-cpus="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042966 4747 flags.go:64] FLAG: --reserved-memory="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042976 4747 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042986 4747 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.042995 4747 flags.go:64] FLAG: --rotate-certificates="false" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043003 4747 flags.go:64] FLAG: --rotate-server-certificates="false" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043013 4747 flags.go:64] FLAG: --runonce="false" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043021 4747 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043030 4747 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043040 4747 flags.go:64] FLAG: --seccomp-default="false" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043048 4747 flags.go:64] FLAG: --serialize-image-pulls="true" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043058 4747 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043067 4747 flags.go:64] FLAG: --storage-driver-db="cadvisor" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043077 4747 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043086 4747 flags.go:64] FLAG: 
--storage-driver-password="root" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043095 4747 flags.go:64] FLAG: --storage-driver-secure="false" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043103 4747 flags.go:64] FLAG: --storage-driver-table="stats" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043113 4747 flags.go:64] FLAG: --storage-driver-user="root" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043122 4747 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043132 4747 flags.go:64] FLAG: --sync-frequency="1m0s" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043141 4747 flags.go:64] FLAG: --system-cgroups="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043150 4747 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043164 4747 flags.go:64] FLAG: --system-reserved-cgroup="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043172 4747 flags.go:64] FLAG: --tls-cert-file="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043181 4747 flags.go:64] FLAG: --tls-cipher-suites="[]" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043199 4747 flags.go:64] FLAG: --tls-min-version="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043208 4747 flags.go:64] FLAG: --tls-private-key-file="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043219 4747 flags.go:64] FLAG: --topology-manager-policy="none" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043228 4747 flags.go:64] FLAG: --topology-manager-policy-options="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043237 4747 flags.go:64] FLAG: --topology-manager-scope="container" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043246 4747 flags.go:64] FLAG: --v="2" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043257 4747 flags.go:64] FLAG: --version="false" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043268 4747 flags.go:64] FLAG: --vmodule="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043278 4747 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.043287 4747 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043524 4747 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043534 4747 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043542 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043552 4747 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043560 4747 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043568 4747 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043577 4747 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043584 4747 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 
06:16:39.043594 4747 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043601 4747 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043612 4747 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043622 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043630 4747 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043640 4747 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043649 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043659 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043667 4747 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043677 4747 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043686 4747 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043694 4747 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043701 4747 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043709 4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043717 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043725 4747 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043733 4747 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043740 4747 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043771 4747 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043780 4747 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043789 4747 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043797 4747 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043805 4747 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043812 4747 feature_gate.go:330] unrecognized feature gate: Example Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043820 4747 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043828 4747 feature_gate.go:330] unrecognized feature 
gate: NutanixMultiSubnets Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043839 4747 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043849 4747 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043857 4747 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043866 4747 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043875 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043884 4747 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043891 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043899 4747 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043936 4747 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043944 4747 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043952 4747 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043959 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043967 4747 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043983 4747 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043991 4747 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.043999 4747 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.044008 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.044015 4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.044025 4747 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.044035 4747 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.044042 4747 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.044050 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.044058 4747 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.044066 4747 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.044073 4747 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.044082 4747 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.044092 4747 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.044101 4747 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.044110 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.044118 4747 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.044127 4747 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.044136 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.044143 4747 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.044151 4747 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.044159 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.044166 4747 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.044174 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.044197 4747 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.054408 4747 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.054477 4747 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054613 4747 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 
06:16:39.054637 4747 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054645 4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054653 4747 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054660 4747 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054667 4747 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054674 4747 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054680 4747 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054687 4747 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054694 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054702 4747 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054710 4747 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054716 4747 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054724 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054731 4747 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054738 4747 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054744 4747 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054776 4747 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054784 4747 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054789 4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054794 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054799 4747 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054807 4747 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054817 4747 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054823 4747 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054831 4747 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054839 4747 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054846 4747 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054852 4747 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054859 4747 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054865 4747 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054872 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054879 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054886 4747 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054905 4747 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054912 4747 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054920 4747 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054927 4747 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054932 4747 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054938 4747 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054943 4747 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054949 4747 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054954 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054959 4747 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054965 4747 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054971 4747 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054978 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054985 4747 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. 
It will be removed in a future release. Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054992 4747 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.054998 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055004 4747 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055011 4747 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055017 4747 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055022 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055028 4747 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055033 4747 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055038 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055044 4747 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055049 4747 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055054 4747 feature_gate.go:330] unrecognized feature gate: Example Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055059 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055065 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055070 4747 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055075 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055081 4747 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055087 4747 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055094 4747 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055100 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055106 4747 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055113 4747 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055131 4747 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.055144 4747 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false 
RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055408 4747 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055420 4747 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055427 4747 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055433 4747 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055439 4747 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055445 4747 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055451 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055457 4747 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055462 4747 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055477 4747 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055515 4747 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055521 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055527 4747 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055533 4747 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055538 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055544 4747 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055551 4747 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055558 4747 feature_gate.go:330] unrecognized feature gate: PlatformOperators Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055564 4747 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055570 4747 feature_gate.go:330] unrecognized feature gate: SignatureStores Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055577 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055584 4747 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055590 4747 feature_gate.go:330] unrecognized feature gate: OVNObservability Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055598 4747 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055605 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055612 4747 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055619 4747 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055625 4747 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055631 4747 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055637 4747 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055644 4747 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055651 4747 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055658 4747 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055665 4747 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055687 4747 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055693 4747 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055698 4747 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055703 4747 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055708 4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055713 4747 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055719 4747 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055724 4747 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055729 4747 
feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055735 4747 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055740 4747 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055764 4747 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055769 4747 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055775 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055780 4747 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055785 4747 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055790 4747 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055795 4747 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055801 4747 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055809 4747 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055815 4747 feature_gate.go:330] unrecognized feature gate: GatewayAPI Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055820 4747 feature_gate.go:330] unrecognized feature gate: NewOLM Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055826 4747 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055832 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055837 4747 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055843 4747 feature_gate.go:330] unrecognized feature gate: InsightsConfig Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055848 4747 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055853 4747 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055858 4747 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055866 4747 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055872 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055877 4747 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055883 4747 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055888 4747 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055912 4747 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055918 4747 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.055933 4747 feature_gate.go:330] unrecognized feature gate: Example Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.055941 4747 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.056912 4747 server.go:940] "Client rotation is on, will bootstrap in background" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.064057 4747 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.064157 4747 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.065448 4747 server.go:997] "Starting client certificate rotation" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.065482 4747 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.066536 4747 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-11 16:30:58.856371171 +0000 UTC Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.066711 4747 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1714h14m19.789665393s for next certificate rotation Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.094912 4747 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.098999 4747 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.122710 4747 log.go:25] "Validated CRI v1 runtime API" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.153663 4747 log.go:25] "Validated CRI v1 image API" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.155436 4747 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.162047 4747 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-01-06-13-04-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.162108 4747 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.191250 4747 manager.go:217] Machine: {Timestamp:2025-10-01 06:16:39.188838782 +0000 UTC m=+0.598495851 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:486dc906-2211-4bcf-95f6-cf6c55ee481c BootID:f048b45a-6d3e-4f45-bc91-5a73d3968d47 Filesystems:[{Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 
DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:7c:64:df Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:7c:64:df Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:6a:62:af Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:a9:fd:7b Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:7d:64:d1 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:45:b2:a1 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:f6:31:39:3b:72:02 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:4a:a6:10:59:23:79 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] 
SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.191672 4747 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.191897 4747 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.192781 4747 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.192943 4747 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.192975 4747 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.193146 4747 topology_manager.go:138] "Creating topology manager with none policy" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.193155 4747 container_manager_linux.go:303] "Creating device plugin manager" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.193719 4747 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.193764 4747 server.go:66] "Creating device plugin registration server" version="v1beta1" 
socket="/var/lib/kubelet/device-plugins/kubelet.sock" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.193915 4747 state_mem.go:36] "Initialized new in-memory state store" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.194008 4747 server.go:1245] "Using root directory" path="/var/lib/kubelet" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.197918 4747 kubelet.go:418] "Attempting to sync node with API server" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.197944 4747 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.197970 4747 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.197983 4747 kubelet.go:324] "Adding apiserver pod source" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.197996 4747 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.201245 4747 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.201993 4747 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.204716 4747 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.205352 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.205382 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Oct 01 06:16:39 crc kubenswrapper[4747]: E1001 06:16:39.205812 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.51:6443: connect: connection refused" logger="UnhandledError" Oct 01 06:16:39 crc kubenswrapper[4747]: E1001 06:16:39.205823 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.51:6443: connect: connection refused" logger="UnhandledError" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.206166 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.206185 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.206191 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.206197 4747 plugins.go:603] "Loaded volume plugin" 
pluginName="kubernetes.io/host-path" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.206207 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.206214 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.206220 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.206231 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.206247 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.206254 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.206540 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.206551 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.208663 4747 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.210839 4747 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.212581 4747 server.go:1280] "Started kubelet" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.213609 4747 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.213631 4747 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.214440 4747 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Oct 01 06:16:39 crc systemd[1]: Started Kubernetes Kubelet. 
Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.215558 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.215640 4747 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.215738 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 12:45:37.833718892 +0000 UTC Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.215887 4747 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1038h28m58.617836635s for next certificate rotation Oct 01 06:16:39 crc kubenswrapper[4747]: E1001 06:16:39.215948 4747 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.216715 4747 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.216849 4747 volume_manager.go:287] "The desired_state_of_world populator starts" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.216877 4747 volume_manager.go:289] "Starting Kubelet Volume Manager" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.217115 4747 factory.go:55] Registering systemd factory Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.217134 4747 factory.go:221] Registration of the systemd container factory successfully Oct 01 06:16:39 crc kubenswrapper[4747]: E1001 06:16:39.217452 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" interval="200ms" Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.218048 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Oct 01 06:16:39 crc kubenswrapper[4747]: E1001 06:16:39.218101 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.51:6443: connect: connection refused" logger="UnhandledError" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.218221 4747 server.go:460] "Adding debug handlers to kubelet server" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.219622 4747 factory.go:153] Registering CRI-O factory Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.219645 4747 factory.go:221] Registration of the crio container factory successfully Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.219703 4747 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.219727 4747 factory.go:103] Registering Raw factory Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.219742 4747 manager.go:1196] Started watching for new ooms in manager Oct 01 06:16:39 crc 
kubenswrapper[4747]: E1001 06:16:39.218185 4747 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.51:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186a497b7f9ec7f8 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-01 06:16:39.212517368 +0000 UTC m=+0.622174467,LastTimestamp:2025-10-01 06:16:39.212517368 +0000 UTC m=+0.622174467,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.221095 4747 manager.go:319] Starting recovery of all containers Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234048 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234147 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234171 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234188 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234206 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234219 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234236 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234251 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 
06:16:39.234271 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234284 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234298 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234316 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234332 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234353 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234367 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234388 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234403 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234421 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234433 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234446 4747 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234463 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234478 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234495 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234506 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234519 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234537 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.234553 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235057 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235119 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235135 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235156 4747 reconstruct.go:130] "Volume is marked 
as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235174 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235189 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235206 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235218 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235234 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235245 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235263 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235275 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235285 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235298 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235308 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235320 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235333 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235344 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235356 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235406 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235419 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235433 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235445 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235458 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235468 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235488 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235500 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235514 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235530 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235542 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235553 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235562 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235574 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235585 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235594 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235610 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235659 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235673 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235690 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235703 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235716 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235725 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235770 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235785 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235796 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235819 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235836 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235862 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" 
volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235878 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235890 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235961 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235980 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.235992 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.236007 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.236064 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.236089 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.236102 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.236119 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.236135 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" 
volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.236147 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.236169 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.236182 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.236194 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.236209 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.236223 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.236243 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.236256 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.236271 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.236289 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.236302 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" 
volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.236321 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.236332 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238039 4747 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238061 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238072 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238105 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238116 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238126 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238146 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238199 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238215 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238233 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238253 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238284 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238302 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238314 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238332 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238351 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238363 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238374 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238385 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238400 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238425 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238443 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238455 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238466 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238481 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238490 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238502 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238518 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238531 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238546 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238559 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238572 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238588 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238601 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238618 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238633 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238647 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238666 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238682 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238701 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238715 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238727 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238741 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238774 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238791 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238805 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238816 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238831 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238844 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238857 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238874 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238886 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238905 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" 
volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238917 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238929 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238944 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238957 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.238990 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239004 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239019 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239034 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239050 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239066 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239081 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" 
volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239093 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239110 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239123 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239138 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239165 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239181 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239199 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239217 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239233 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239247 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239259 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239288 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239304 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239323 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239336 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239348 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239362 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239371 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239381 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239393 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239402 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239414 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" 
volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239424 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239434 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239447 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239459 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239474 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239491 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239503 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239518 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239531 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239546 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239560 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239572 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239588 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239602 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239619 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239630 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239644 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239676 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239694 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239714 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239727 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239786 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239805 4747 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239816 4747 reconstruct.go:97] "Volume reconstruction finished" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.239826 4747 reconciler.go:26] "Reconciler: start to sync state" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.255644 4747 manager.go:324] Recovery completed Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.270681 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.272650 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.272695 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.272708 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.273087 4747 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.274061 4747 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.274081 4747 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.274102 4747 state_mem.go:36] "Initialized new in-memory state store" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.275421 4747 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.275479 4747 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.275503 4747 kubelet.go:2335] "Starting kubelet main sync loop" Oct 01 06:16:39 crc kubenswrapper[4747]: E1001 06:16:39.275790 4747 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.276213 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Oct 01 06:16:39 crc kubenswrapper[4747]: E1001 06:16:39.276287 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.51:6443: connect: connection refused" logger="UnhandledError" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.300146 4747 policy_none.go:49] "None policy: Start" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.301316 4747 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.301345 4747 state_mem.go:35] "Initializing new in-memory state store" Oct 01 06:16:39 crc kubenswrapper[4747]: E1001 06:16:39.316720 4747 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.375143 4747 manager.go:334] "Starting Device Plugin manager" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.375584 4747 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.375600 4747 server.go:79] "Starting device plugin registration server" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.376045 4747 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 01 06:16:39 crc kubenswrapper[4747]: E1001 06:16:39.376082 4747 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.376066 4747 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.376222 4747 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.376312 4747 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.376321 4747 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 01 06:16:39 crc kubenswrapper[4747]: E1001 06:16:39.387152 4747 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 01 06:16:39 crc kubenswrapper[4747]: E1001 06:16:39.418890 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" interval="400ms" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.476463 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.477822 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.477891 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.477908 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.477945 4747 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 06:16:39 crc kubenswrapper[4747]: E1001 06:16:39.478494 4747 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.51:6443: connect: connection refused" node="crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.577058 4747 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.577306 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.579378 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.579474 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.579501 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.579722 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.579919 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.579984 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.581397 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.581466 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.581484 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.581541 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.581587 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.581613 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.582102 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.583187 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.583311 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.583825 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.583888 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.583905 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.584125 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.585144 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.585209 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.586687 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.586739 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.586770 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.586836 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.586899 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.586916 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.587173 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.587332 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.587371 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.587392 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.588871 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.588886 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.588929 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.589001 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.589028 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.589381 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.589449 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.595459 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.595501 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.595519 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.595850 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.595895 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.595918 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.645681 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.645803 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.645857 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.645904 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.645951 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.645995 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 06:16:39 crc 
kubenswrapper[4747]: I1001 06:16:39.646059 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.646111 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.646167 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.646218 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.646261 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.646303 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.646354 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.646418 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.646476 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 
06:16:39.679178 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.680533 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.680651 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.680674 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.680737 4747 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 06:16:39 crc kubenswrapper[4747]: E1001 06:16:39.681410 4747 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.51:6443: connect: connection refused" node="crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.747928 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748053 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748094 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748147 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748178 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748233 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748263 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" 
(UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748260 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748318 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748347 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748357 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748451 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748459 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748483 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748511 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748514 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748545 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748261 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748581 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748579 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748660 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748609 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748666 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748667 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748605 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748845 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748898 4747 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748909 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.748692 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.749065 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: E1001 06:16:39.819916 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" interval="800ms" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.916844 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.943158 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.960248 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.966267 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-26212f949779aaa6cc6098d0e47684937b868ab7269fb6610ca3bdc50a9e3a4e WatchSource:0}: Error finding container 26212f949779aaa6cc6098d0e47684937b868ab7269fb6610ca3bdc50a9e3a4e: Status 404 returned error can't find the container with id 26212f949779aaa6cc6098d0e47684937b868ab7269fb6610ca3bdc50a9e3a4e Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.983679 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.984867 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-45e20ec6a355317f57bbbd7db341cecebcd624f93c3986c7a8dd34c697663d3f WatchSource:0}: Error finding container 45e20ec6a355317f57bbbd7db341cecebcd624f93c3986c7a8dd34c697663d3f: Status 404 returned error can't find the container with id 45e20ec6a355317f57bbbd7db341cecebcd624f93c3986c7a8dd34c697663d3f Oct 01 06:16:39 crc kubenswrapper[4747]: W1001 06:16:39.987478 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-eed7d4958405df53ba97f7b30a29d18735d2c6d92d81672ee4b3594492647a33 WatchSource:0}: Error finding container eed7d4958405df53ba97f7b30a29d18735d2c6d92d81672ee4b3594492647a33: Status 404 returned error can't find the container with id eed7d4958405df53ba97f7b30a29d18735d2c6d92d81672ee4b3594492647a33 Oct 01 06:16:39 crc kubenswrapper[4747]: I1001 06:16:39.992453 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 06:16:40 crc kubenswrapper[4747]: W1001 06:16:40.000164 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-e4895b46c8c849d7abdca861d84bfacaf11aee0dba46b65d1a1b2f2d9376c4ce WatchSource:0}: Error finding container e4895b46c8c849d7abdca861d84bfacaf11aee0dba46b65d1a1b2f2d9376c4ce: Status 404 returned error can't find the container with id e4895b46c8c849d7abdca861d84bfacaf11aee0dba46b65d1a1b2f2d9376c4ce Oct 01 06:16:40 crc kubenswrapper[4747]: W1001 06:16:40.024000 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-21fa019368cde294f8a4fd242d8a0376517a60ee12249f4cfa0cec2f86ce8be8 WatchSource:0}: Error finding container 21fa019368cde294f8a4fd242d8a0376517a60ee12249f4cfa0cec2f86ce8be8: Status 404 returned error can't find the container with id 21fa019368cde294f8a4fd242d8a0376517a60ee12249f4cfa0cec2f86ce8be8 Oct 01 06:16:40 crc kubenswrapper[4747]: W1001 06:16:40.049314 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Oct 01 06:16:40 crc kubenswrapper[4747]: E1001 06:16:40.049493 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.51:6443: connect: connection refused" logger="UnhandledError" Oct 01 06:16:40 crc kubenswrapper[4747]: I1001 06:16:40.082478 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:40 crc kubenswrapper[4747]: I1001 06:16:40.083816 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:40 crc kubenswrapper[4747]: I1001 
06:16:40.083873 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:40 crc kubenswrapper[4747]: I1001 06:16:40.083883 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:40 crc kubenswrapper[4747]: I1001 06:16:40.083908 4747 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 06:16:40 crc kubenswrapper[4747]: E1001 06:16:40.084484 4747 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.51:6443: connect: connection refused" node="crc" Oct 01 06:16:40 crc kubenswrapper[4747]: W1001 06:16:40.089989 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Oct 01 06:16:40 crc kubenswrapper[4747]: E1001 06:16:40.090086 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.51:6443: connect: connection refused" logger="UnhandledError" Oct 01 06:16:40 crc kubenswrapper[4747]: I1001 06:16:40.212809 4747 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Oct 01 06:16:40 crc kubenswrapper[4747]: I1001 06:16:40.280054 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"e4895b46c8c849d7abdca861d84bfacaf11aee0dba46b65d1a1b2f2d9376c4ce"} Oct 01 06:16:40 crc kubenswrapper[4747]: I1001 06:16:40.281295 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"eed7d4958405df53ba97f7b30a29d18735d2c6d92d81672ee4b3594492647a33"} Oct 01 06:16:40 crc kubenswrapper[4747]: I1001 06:16:40.282732 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"45e20ec6a355317f57bbbd7db341cecebcd624f93c3986c7a8dd34c697663d3f"} Oct 01 06:16:40 crc kubenswrapper[4747]: I1001 06:16:40.287607 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"26212f949779aaa6cc6098d0e47684937b868ab7269fb6610ca3bdc50a9e3a4e"} Oct 01 06:16:40 crc kubenswrapper[4747]: I1001 06:16:40.289456 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"21fa019368cde294f8a4fd242d8a0376517a60ee12249f4cfa0cec2f86ce8be8"} Oct 01 06:16:40 crc kubenswrapper[4747]: W1001 06:16:40.342201 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get 
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Oct 01 06:16:40 crc kubenswrapper[4747]: E1001 06:16:40.342305 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.51:6443: connect: connection refused" logger="UnhandledError" Oct 01 06:16:40 crc kubenswrapper[4747]: E1001 06:16:40.621058 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" interval="1.6s" Oct 01 06:16:40 crc kubenswrapper[4747]: W1001 06:16:40.764461 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Oct 01 06:16:40 crc kubenswrapper[4747]: E1001 06:16:40.764569 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.51:6443: connect: connection refused" logger="UnhandledError" Oct 01 06:16:40 crc kubenswrapper[4747]: I1001 06:16:40.884693 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:40 crc kubenswrapper[4747]: I1001 06:16:40.886192 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:40 crc kubenswrapper[4747]: I1001 06:16:40.886250 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:40 crc kubenswrapper[4747]: I1001 06:16:40.886263 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:40 crc kubenswrapper[4747]: I1001 06:16:40.886322 4747 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 06:16:40 crc kubenswrapper[4747]: E1001 06:16:40.886938 4747 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.51:6443: connect: connection refused" node="crc" Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.213149 4747 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.296908 4747 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="a0e6c0f45945df898ef2838d574a0a1bad9dafb6a6608f336e1c92656d7018b5" exitCode=0 Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.297008 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" 
event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"a0e6c0f45945df898ef2838d574a0a1bad9dafb6a6608f336e1c92656d7018b5"} Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.297055 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.300968 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.301015 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.301033 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.303311 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6"} Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.303361 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2"} Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.303374 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7"} Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.305228 4747 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146" exitCode=0 Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.305286 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146"} Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.305421 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.306623 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.306659 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.306674 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.307744 4747 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428" exitCode=0 Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.307858 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428"} Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.307878 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.309175 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.309230 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.309249 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.309911 4747 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687" exitCode=0 Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.309964 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687"} Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.310027 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.310611 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.310640 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.310655 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.311882 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.312845 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.312867 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:41 crc kubenswrapper[4747]: I1001 06:16:41.312877 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.211978 4747 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Oct 01 06:16:42 crc kubenswrapper[4747]: E1001 06:16:42.221473 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" interval="3.2s" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.318532 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"429399ca37c5afa42bbe8695a8fa5de760ec79ec7584b764f0b886acce46e770"} Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.318611 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.318628 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"2379c4c720c1c540235a19746449bd80db698055cee11b03937a739fdbcf7a41"} Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.318780 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"5d005a72a4508464e46cf86c9c347b040b94e66243125197b1ecfc851e9775c0"} Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.320352 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.320413 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.320431 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.321710 4747 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6" exitCode=0 Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.321805 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6"} Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.321826 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.322525 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.322546 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.322557 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.326573 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b"} Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.326596 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b"} Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.326606 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31"} Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.326616 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4"} Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.328569 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"3f5e8ba6a4048e7b3b7609ed4aa459e35520f9de4ed46ad68d1de6ad7c41746e"} Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.328704 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.330425 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.330455 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.330468 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.331480 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a"} Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.331565 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.332405 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.332446 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.332457 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.487878 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.491226 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.491267 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.491280 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:42 crc kubenswrapper[4747]: I1001 06:16:42.491307 4747 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 06:16:42 crc kubenswrapper[4747]: E1001 06:16:42.491858 4747 kubelet_node_status.go:99] "Unable to register node with API server" err="Post 
\"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.51:6443: connect: connection refused" node="crc" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.338892 4747 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da" exitCode=0 Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.339035 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da"} Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.339116 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.340765 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.340793 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.340802 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.343034 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.343036 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb"} Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.343079 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.343101 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.343178 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.343196 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.344204 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.344232 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.344267 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.344525 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.344576 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.344593 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 
06:16:43.344675 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.344705 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.344722 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.344736 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.344833 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.344852 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.473465 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 06:16:43 crc kubenswrapper[4747]: I1001 06:16:43.473833 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 06:16:44 crc kubenswrapper[4747]: I1001 06:16:44.351660 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82"} Oct 01 06:16:44 crc kubenswrapper[4747]: I1001 06:16:44.351721 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1"} Oct 01 06:16:44 crc kubenswrapper[4747]: I1001 06:16:44.351731 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 06:16:44 crc kubenswrapper[4747]: I1001 06:16:44.351809 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:44 crc kubenswrapper[4747]: I1001 06:16:44.351825 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:44 crc kubenswrapper[4747]: I1001 06:16:44.351743 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224"} Oct 01 06:16:44 crc kubenswrapper[4747]: I1001 06:16:44.353359 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:44 crc kubenswrapper[4747]: I1001 06:16:44.353387 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:44 crc kubenswrapper[4747]: I1001 06:16:44.353412 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:44 crc kubenswrapper[4747]: I1001 06:16:44.353431 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:44 crc kubenswrapper[4747]: I1001 06:16:44.353440 4747 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:44 crc kubenswrapper[4747]: I1001 06:16:44.353449 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:45 crc kubenswrapper[4747]: I1001 06:16:45.361306 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 06:16:45 crc kubenswrapper[4747]: I1001 06:16:45.361393 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:45 crc kubenswrapper[4747]: I1001 06:16:45.361982 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81"} Oct 01 06:16:45 crc kubenswrapper[4747]: I1001 06:16:45.362063 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4df0f39144a820ee1c90a431b7915d6ab9b6ea0ec26fc6c88a8f1b716e8f917a"} Oct 01 06:16:45 crc kubenswrapper[4747]: I1001 06:16:45.362089 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:45 crc kubenswrapper[4747]: I1001 06:16:45.362791 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:45 crc kubenswrapper[4747]: I1001 06:16:45.362847 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:45 crc kubenswrapper[4747]: I1001 06:16:45.362865 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:45 crc kubenswrapper[4747]: I1001 06:16:45.363592 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:45 crc kubenswrapper[4747]: I1001 06:16:45.363658 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:45 crc kubenswrapper[4747]: I1001 06:16:45.363683 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:45 crc kubenswrapper[4747]: I1001 06:16:45.692421 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:45 crc kubenswrapper[4747]: I1001 06:16:45.694212 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:45 crc kubenswrapper[4747]: I1001 06:16:45.694273 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:45 crc kubenswrapper[4747]: I1001 06:16:45.694287 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:45 crc kubenswrapper[4747]: I1001 06:16:45.694318 4747 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 06:16:45 crc kubenswrapper[4747]: I1001 06:16:45.973390 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 06:16:45 crc kubenswrapper[4747]: I1001 06:16:45.973538 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:45 crc kubenswrapper[4747]: I1001 
06:16:45.974734 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:45 crc kubenswrapper[4747]: I1001 06:16:45.974811 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:45 crc kubenswrapper[4747]: I1001 06:16:45.974824 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:46 crc kubenswrapper[4747]: I1001 06:16:46.281466 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 06:16:46 crc kubenswrapper[4747]: I1001 06:16:46.364175 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 06:16:46 crc kubenswrapper[4747]: I1001 06:16:46.364236 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:46 crc kubenswrapper[4747]: I1001 06:16:46.364288 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:46 crc kubenswrapper[4747]: I1001 06:16:46.365382 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:46 crc kubenswrapper[4747]: I1001 06:16:46.365420 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:46 crc kubenswrapper[4747]: I1001 06:16:46.365432 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:46 crc kubenswrapper[4747]: I1001 06:16:46.365870 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:46 crc kubenswrapper[4747]: I1001 06:16:46.365940 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:46 crc kubenswrapper[4747]: I1001 06:16:46.365952 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:47 crc kubenswrapper[4747]: I1001 06:16:47.145151 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 06:16:47 crc kubenswrapper[4747]: I1001 06:16:47.145420 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:47 crc kubenswrapper[4747]: I1001 06:16:47.146564 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:47 crc kubenswrapper[4747]: I1001 06:16:47.146591 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:47 crc kubenswrapper[4747]: I1001 06:16:47.146600 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:47 crc kubenswrapper[4747]: I1001 06:16:47.898072 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 06:16:47 crc kubenswrapper[4747]: I1001 06:16:47.898316 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:47 crc kubenswrapper[4747]: I1001 06:16:47.899563 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 
01 06:16:47 crc kubenswrapper[4747]: I1001 06:16:47.899611 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:47 crc kubenswrapper[4747]: I1001 06:16:47.899628 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:48 crc kubenswrapper[4747]: I1001 06:16:48.973826 4747 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 01 06:16:48 crc kubenswrapper[4747]: I1001 06:16:48.973911 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 06:16:49 crc kubenswrapper[4747]: I1001 06:16:49.160737 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Oct 01 06:16:49 crc kubenswrapper[4747]: I1001 06:16:49.160992 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:49 crc kubenswrapper[4747]: I1001 06:16:49.162513 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:49 crc kubenswrapper[4747]: I1001 06:16:49.162572 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:49 crc kubenswrapper[4747]: I1001 06:16:49.162591 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:49 crc kubenswrapper[4747]: I1001 06:16:49.362305 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 06:16:49 crc kubenswrapper[4747]: I1001 06:16:49.362491 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:49 crc kubenswrapper[4747]: I1001 06:16:49.363989 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:49 crc kubenswrapper[4747]: I1001 06:16:49.364053 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:49 crc kubenswrapper[4747]: I1001 06:16:49.364071 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:49 crc kubenswrapper[4747]: E1001 06:16:49.387269 4747 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 01 06:16:51 crc kubenswrapper[4747]: I1001 06:16:51.237186 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 06:16:51 crc kubenswrapper[4747]: I1001 06:16:51.237476 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:51 crc kubenswrapper[4747]: I1001 06:16:51.239082 4747 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:51 crc kubenswrapper[4747]: I1001 06:16:51.239151 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:51 crc kubenswrapper[4747]: I1001 06:16:51.239172 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:51 crc kubenswrapper[4747]: I1001 06:16:51.241796 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 06:16:51 crc kubenswrapper[4747]: I1001 06:16:51.378900 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:51 crc kubenswrapper[4747]: I1001 06:16:51.380250 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:51 crc kubenswrapper[4747]: I1001 06:16:51.380319 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:51 crc kubenswrapper[4747]: I1001 06:16:51.380338 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:51 crc kubenswrapper[4747]: I1001 06:16:51.387220 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 06:16:51 crc kubenswrapper[4747]: I1001 06:16:51.632507 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Oct 01 06:16:51 crc kubenswrapper[4747]: I1001 06:16:51.632745 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:51 crc kubenswrapper[4747]: I1001 06:16:51.634456 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:51 crc kubenswrapper[4747]: I1001 06:16:51.634495 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:51 crc kubenswrapper[4747]: I1001 06:16:51.634511 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:52 crc kubenswrapper[4747]: I1001 06:16:52.380342 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:16:52 crc kubenswrapper[4747]: I1001 06:16:52.381539 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:16:52 crc kubenswrapper[4747]: I1001 06:16:52.381625 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:16:52 crc kubenswrapper[4747]: I1001 06:16:52.381647 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:16:52 crc kubenswrapper[4747]: W1001 06:16:52.702694 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 01 06:16:52 crc kubenswrapper[4747]: I1001 06:16:52.702875 4747 trace.go:236] Trace[2049950784]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Oct-2025 06:16:42.700) (total time: 
10001ms): Oct 01 06:16:52 crc kubenswrapper[4747]: Trace[2049950784]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (06:16:52.702) Oct 01 06:16:52 crc kubenswrapper[4747]: Trace[2049950784]: [10.001927159s] [10.001927159s] END Oct 01 06:16:52 crc kubenswrapper[4747]: E1001 06:16:52.702910 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 01 06:16:52 crc kubenswrapper[4747]: W1001 06:16:52.750950 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 01 06:16:52 crc kubenswrapper[4747]: I1001 06:16:52.751087 4747 trace.go:236] Trace[287912871]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Oct-2025 06:16:42.749) (total time: 10001ms): Oct 01 06:16:52 crc kubenswrapper[4747]: Trace[287912871]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (06:16:52.750) Oct 01 06:16:52 crc kubenswrapper[4747]: Trace[287912871]: [10.001260925s] [10.001260925s] END Oct 01 06:16:52 crc kubenswrapper[4747]: E1001 06:16:52.751122 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 01 06:16:52 crc kubenswrapper[4747]: W1001 06:16:52.941430 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 01 06:16:52 crc kubenswrapper[4747]: I1001 06:16:52.941669 4747 trace.go:236] Trace[228545633]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Oct-2025 06:16:42.939) (total time: 10002ms): Oct 01 06:16:52 crc kubenswrapper[4747]: Trace[228545633]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (06:16:52.941) Oct 01 06:16:52 crc kubenswrapper[4747]: Trace[228545633]: [10.002053409s] [10.002053409s] END Oct 01 06:16:52 crc kubenswrapper[4747]: E1001 06:16:52.941806 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 01 06:16:53 crc kubenswrapper[4747]: I1001 06:16:53.214279 4747 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Oct 01 06:16:53 crc 
kubenswrapper[4747]: W1001 06:16:53.439999 4747 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Oct 01 06:16:53 crc kubenswrapper[4747]: I1001 06:16:53.440937 4747 trace.go:236] Trace[1823825424]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Oct-2025 06:16:43.438) (total time: 10002ms): Oct 01 06:16:53 crc kubenswrapper[4747]: Trace[1823825424]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (06:16:53.439) Oct 01 06:16:53 crc kubenswrapper[4747]: Trace[1823825424]: [10.002752694s] [10.002752694s] END Oct 01 06:16:53 crc kubenswrapper[4747]: E1001 06:16:53.441004 4747 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Oct 01 06:16:53 crc kubenswrapper[4747]: I1001 06:16:53.473694 4747 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="Get \"https://192.168.126.11:6443/livez\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 01 06:16:53 crc kubenswrapper[4747]: I1001 06:16:53.473850 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/livez\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 01 06:16:53 crc kubenswrapper[4747]: I1001 06:16:53.607833 4747 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 01 06:16:53 crc kubenswrapper[4747]: I1001 06:16:53.607907 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 01 06:16:57 crc kubenswrapper[4747]: I1001 06:16:57.081173 4747 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Oct 01 06:16:57 crc kubenswrapper[4747]: I1001 06:16:57.081242 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Oct 01 06:16:57 crc kubenswrapper[4747]: I1001 06:16:57.451010 4747 
reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Oct 01 06:16:57 crc kubenswrapper[4747]: I1001 06:16:57.791439 4747 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Oct 01 06:16:57 crc kubenswrapper[4747]: I1001 06:16:57.898486 4747 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Oct 01 06:16:57 crc kubenswrapper[4747]: I1001 06:16:57.898551 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.210310 4747 apiserver.go:52] "Watching apiserver" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.214382 4747 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.214811 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h"] Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.215237 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.215342 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.215607 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.215657 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:16:58 crc kubenswrapper[4747]: E1001 06:16:58.215648 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:16:58 crc kubenswrapper[4747]: E1001 06:16:58.215765 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.216592 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.216647 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 06:16:58 crc kubenswrapper[4747]: E1001 06:16:58.216715 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.217438 4747 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.217694 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.218097 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.218644 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.219025 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.219027 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.219367 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.219375 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.219807 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.220436 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.252524 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.264124 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.277293 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.287381 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.302630 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.318492 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.330547 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.382572 4747 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.480637 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.481542 4747 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.481614 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.487191 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.498921 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.506564 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.515080 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.525534 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.541663 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.552576 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.566794 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.582486 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: E1001 06:16:58.595066 4747 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.598295 4747 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Oct 01 06:16:58 crc kubenswrapper[4747]: E1001 06:16:58.598722 4747 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.600655 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.613037 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.629036 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.647024 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.660255 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.669333 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.699302 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.699362 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.699397 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.699431 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.699478 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.699512 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.699543 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: 
\"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.699573 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.699602 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.699634 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.699666 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.699700 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.699807 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.699812 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.699843 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.699879 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.699912 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.699954 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.699989 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700020 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700082 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700115 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700144 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700172 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700198 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700200 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700244 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700262 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700254 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700265 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700394 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700340 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700442 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700460 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700474 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700476 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700491 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700506 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700523 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700562 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700579 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700596 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700612 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700653 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700671 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700687 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod 
\"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700703 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700721 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700737 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700771 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700789 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700804 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700822 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700837 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700853 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700868 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700883 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700898 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700914 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700928 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700943 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700957 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700971 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.700985 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701004 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701004 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod 
"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701042 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701059 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701073 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701091 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701084 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701093 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701108 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701180 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701217 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701248 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701258 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701300 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701290 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701379 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701402 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701412 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701437 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701446 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701515 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701544 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701559 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701599 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701636 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701634 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701684 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701720 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701798 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701834 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701868 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701897 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701901 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701927 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.701961 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702003 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702039 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702059 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702080 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702076 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702163 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702199 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702231 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702240 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702271 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702288 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702304 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702337 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702368 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702401 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702405 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702440 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702472 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702506 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702538 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702569 4747 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702600 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702632 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702667 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702699 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702731 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702789 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702841 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702873 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702907 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") 
" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702938 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702969 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703001 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703085 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703122 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703155 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703187 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703218 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703251 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703284 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 01 
06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703317 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703351 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703385 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703421 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703454 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703489 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703521 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703559 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703591 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703623 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: 
\"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703793 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703829 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703863 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703895 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703927 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703959 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.703992 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704028 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704060 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704098 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod 
\"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704138 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704176 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704224 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704257 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704289 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704323 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704357 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704390 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704424 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704459 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704492 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704527 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704561 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704594 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704628 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704662 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704695 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704729 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704789 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704825 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704859 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704892 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704926 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.704962 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.705002 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.705037 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.705072 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.705104 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.705138 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.705171 4747 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.705204 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.705238 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707035 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707080 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707116 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707150 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707185 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707221 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707254 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707290 4747 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707324 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707504 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707545 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707580 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707623 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707659 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707697 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707741 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707820 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 01 06:16:58 crc kubenswrapper[4747]: 
I1001 06:16:58.707857 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707890 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707923 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707958 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.708118 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.708159 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.708196 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.708230 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.708265 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.708302 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 01 06:16:58 crc 
kubenswrapper[4747]: I1001 06:16:58.708338 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.708402 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.708447 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.708561 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.708609 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.708649 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.708771 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.708811 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.708858 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod 
\"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.708896 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.708934 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.708969 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709008 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709082 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709118 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709177 4747 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709200 4747 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709222 4747 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" 
DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709265 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709286 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709307 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709327 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709347 4747 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709367 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709386 4747 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709406 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709428 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709447 4747 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709468 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709520 4747 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709542 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709564 4747 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709583 4747 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709604 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709624 4747 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709645 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709665 4747 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709685 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.742191 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.742996 4747 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.746142 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702398 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702493 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.702657 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.705381 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.705731 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.705742 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.705969 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.705993 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.705990 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.706006 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.706040 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.706309 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.706188 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707174 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707347 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.707119 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.708377 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.708741 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.708773 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709212 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709334 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709481 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709586 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709537 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). 
InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709605 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.709940 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.710061 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.710174 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: E1001 06:16:58.710266 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:16:59.210239121 +0000 UTC m=+20.619896200 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.762164 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.757010 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.710402 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.710521 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.710592 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.710708 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.710717 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.710782 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.711078 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.711133 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.716211 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.716924 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.716944 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.717154 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.717447 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.718013 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.718362 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.718525 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.718544 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.718917 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.719085 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.719258 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). 
InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.719574 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.719578 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.719803 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.719836 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.720115 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.720402 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.720545 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.721262 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.721474 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.721639 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.728729 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.729597 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.729789 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.729925 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.730280 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.730497 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.731978 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.732177 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.732337 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.732398 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.732517 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.732736 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.732956 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.734025 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.734319 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.734393 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.734622 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.734813 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.735001 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.735190 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.735221 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.735422 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.735976 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.736009 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.736035 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.736618 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.736766 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.737153 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.737550 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.737583 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.737681 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.737876 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.737953 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.738066 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.738189 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.735431 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.738374 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.738835 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.738970 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.739536 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.740294 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.740412 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). 
InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.740451 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.740497 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.740508 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.740551 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.740594 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.740599 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.740944 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.741109 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: E1001 06:16:58.741134 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.741190 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.729863 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.741350 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.741713 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.741941 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.742089 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.742422 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.742568 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.742735 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.742973 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.743626 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.743865 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: E1001 06:16:58.745197 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.745361 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.745558 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.745814 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.746706 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.746769 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.748813 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.749580 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.749637 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.751314 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.751830 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). 
InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.752896 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.753326 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.754034 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.754647 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.754876 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.755022 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.756165 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.756303 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.756365 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.756861 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.757116 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.757284 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.757506 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.757838 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.758222 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.758874 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.759171 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.759909 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.760130 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.760200 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.761339 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.761559 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). 
InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.762069 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.762088 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.762634 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.764489 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.765074 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: E1001 06:16:58.772160 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 06:16:59.27212216 +0000 UTC m=+20.681779219 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 06:16:58 crc kubenswrapper[4747]: E1001 06:16:58.772214 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 06:16:59.272204902 +0000 UTC m=+20.681861971 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.774237 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.774398 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.775408 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.775987 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.778011 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.778066 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.778646 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.778800 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-mpx8t"] Oct 01 06:16:58 crc kubenswrapper[4747]: E1001 06:16:58.778836 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 06:16:58 crc kubenswrapper[4747]: E1001 06:16:58.778865 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 06:16:58 crc kubenswrapper[4747]: E1001 06:16:58.778880 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:16:58 crc kubenswrapper[4747]: E1001 06:16:58.778948 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 06:16:59.27892671 +0000 UTC m=+20.688583779 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.779042 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.779061 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-mpx8t" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.779100 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.779124 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.779346 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: E1001 06:16:58.780414 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 06:16:58 crc kubenswrapper[4747]: E1001 06:16:58.780432 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 06:16:58 crc kubenswrapper[4747]: E1001 06:16:58.780445 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:16:58 crc kubenswrapper[4747]: E1001 06:16:58.780494 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 06:16:59.280480869 +0000 UTC m=+20.690137918 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.781554 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.781937 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.782317 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.782407 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.786454 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.789606 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.792703 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.795485 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.799965 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.801054 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.802654 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.808936 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.810376 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.810414 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.810499 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 06:16:58 crc 
kubenswrapper[4747]: I1001 06:16:58.810719 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.810862 4747 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.810962 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.810983 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811001 4747 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811019 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811035 4747 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811051 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811067 4747 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811083 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811099 4747 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811116 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811134 4747 
reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811150 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811183 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811201 4747 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811217 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811233 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811248 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811264 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811282 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811298 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811316 4747 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811334 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811351 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc 
kubenswrapper[4747]: I1001 06:16:58.811367 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811382 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811398 4747 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811414 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811430 4747 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811446 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811462 4747 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811478 4747 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811493 4747 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811509 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811524 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811539 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811556 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811573 4747 
reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811591 4747 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811608 4747 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811625 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811655 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811672 4747 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811688 4747 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811704 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811720 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811736 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811785 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811802 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811818 4747 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811833 4747 
reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811848 4747 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811863 4747 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811877 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811893 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811907 4747 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811921 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811937 4747 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811965 4747 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.811983 4747 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812000 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812018 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812035 4747 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812052 4747 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812068 4747 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812086 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812102 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812118 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812135 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812151 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812178 4747 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812193 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812209 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812224 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812238 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812255 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812271 4747 reconciler_common.go:293] "Volume detached 
for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812288 4747 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812304 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812320 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812335 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812349 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812364 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812378 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812393 4747 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812417 4747 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812432 4747 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812447 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812462 4747 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812478 4747 reconciler_common.go:293] "Volume detached for 
volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812493 4747 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812509 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812526 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812540 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812555 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812570 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812584 4747 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812601 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812615 4747 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812630 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812646 4747 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812661 4747 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812676 4747 reconciler_common.go:293] "Volume detached for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812691 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812707 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812721 4747 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812737 4747 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812775 4747 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812791 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812807 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812832 4747 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812849 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812864 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812879 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812894 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812909 4747 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812927 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812943 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812958 4747 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812973 4747 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.812988 4747 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813002 4747 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813026 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813040 4747 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813057 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813072 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813088 4747 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813103 4747 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813118 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: 
\"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813133 4747 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813149 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813165 4747 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813182 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813198 4747 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813212 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813227 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813240 4747 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813255 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813269 4747 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813284 4747 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813299 4747 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813315 4747 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on 
node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813330 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813346 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813361 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813376 4747 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813390 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813406 4747 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813420 4747 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813434 4747 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813449 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813463 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813478 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813491 4747 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813506 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Oct 
01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813519 4747 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813535 4747 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813551 4747 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813566 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813580 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813595 4747 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813609 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813628 4747 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813644 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813660 4747 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813676 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813719 4747 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813735 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" 
Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813772 4747 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813787 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.813804 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.820343 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.828368 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.837804 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.838860 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.849912 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.850366 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.862330 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.862667 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cer
t-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:58 crc kubenswrapper[4747]: W1001 06:16:58.873922 4747 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-3047f71287349821cdb1b31c73572cd8b78b8077e67ceaee47681b7aea69d96f WatchSource:0}: Error finding container 3047f71287349821cdb1b31c73572cd8b78b8077e67ceaee47681b7aea69d96f: Status 404 returned error can't find the container with id 3047f71287349821cdb1b31c73572cd8b78b8077e67ceaee47681b7aea69d96f Oct 01 06:16:58 crc kubenswrapper[4747]: W1001 06:16:58.875434 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-7dfa4ddcc597b0ce62258bc7affd8fd2b525f3bf96033643106b00da91723e69 WatchSource:0}: Error finding container 7dfa4ddcc597b0ce62258bc7affd8fd2b525f3bf96033643106b00da91723e69: Status 404 returned error can't find the container with id 7dfa4ddcc597b0ce62258bc7affd8fd2b525f3bf96033643106b00da91723e69 Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.914525 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggrj5\" (UniqueName: \"kubernetes.io/projected/d323e332-ebe0-4a35-a811-f484557e7d7a-kube-api-access-ggrj5\") pod \"node-resolver-mpx8t\" (UID: \"d323e332-ebe0-4a35-a811-f484557e7d7a\") " pod="openshift-dns/node-resolver-mpx8t" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.914576 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d323e332-ebe0-4a35-a811-f484557e7d7a-hosts-file\") pod \"node-resolver-mpx8t\" (UID: \"d323e332-ebe0-4a35-a811-f484557e7d7a\") " pod="openshift-dns/node-resolver-mpx8t" Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.946581 4747 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.974599 4747 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 01 06:16:58 crc kubenswrapper[4747]: I1001 06:16:58.974657 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.014932 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggrj5\" (UniqueName: \"kubernetes.io/projected/d323e332-ebe0-4a35-a811-f484557e7d7a-kube-api-access-ggrj5\") pod \"node-resolver-mpx8t\" (UID: \"d323e332-ebe0-4a35-a811-f484557e7d7a\") " pod="openshift-dns/node-resolver-mpx8t" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.014981 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d323e332-ebe0-4a35-a811-f484557e7d7a-hosts-file\") pod \"node-resolver-mpx8t\" (UID: \"d323e332-ebe0-4a35-a811-f484557e7d7a\") " pod="openshift-dns/node-resolver-mpx8t" Oct 01 
06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.015063 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d323e332-ebe0-4a35-a811-f484557e7d7a-hosts-file\") pod \"node-resolver-mpx8t\" (UID: \"d323e332-ebe0-4a35-a811-f484557e7d7a\") " pod="openshift-dns/node-resolver-mpx8t" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.031474 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggrj5\" (UniqueName: \"kubernetes.io/projected/d323e332-ebe0-4a35-a811-f484557e7d7a-kube-api-access-ggrj5\") pod \"node-resolver-mpx8t\" (UID: \"d323e332-ebe0-4a35-a811-f484557e7d7a\") " pod="openshift-dns/node-resolver-mpx8t" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.109538 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-mpx8t" Oct 01 06:16:59 crc kubenswrapper[4747]: W1001 06:16:59.118862 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd323e332_ebe0_4a35_a811_f484557e7d7a.slice/crio-d5e1bf4e4d3a5317fbaded7ffc1582fc2f997c6d709ca0c26d3abe2cd818b81d WatchSource:0}: Error finding container d5e1bf4e4d3a5317fbaded7ffc1582fc2f997c6d709ca0c26d3abe2cd818b81d: Status 404 returned error can't find the container with id d5e1bf4e4d3a5317fbaded7ffc1582fc2f997c6d709ca0c26d3abe2cd818b81d Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.125113 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-gh9dg"] Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.125451 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.129489 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.129519 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.129811 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.130033 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-pvmkj"] Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.130695 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.130818 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.130907 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.131017 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p48hw"] Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.134551 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.134857 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-w5wxn"] Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.135149 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.135732 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-4g26h"] Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.135960 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.137239 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.137305 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.137367 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.137453 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:16:59 crc kubenswrapper[4747]: E1001 06:16:59.137834 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.137451 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.137456 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.137685 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.138902 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.140039 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.140307 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.140485 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.140605 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.140695 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.142198 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.148073 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.157122 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.165806 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.172890 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc 
kubenswrapper[4747]: I1001 06:16:59.193586 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runnin
g\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216186 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: E1001 06:16:59.216262 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:17:00.216247634 +0000 UTC m=+21.625904683 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216206 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216459 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-cni-bin\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216486 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/38849139-d385-42a4-adab-687566065973-ovnkube-script-lib\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216501 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-host-run-netns\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216516 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4p76x\" (UniqueName: \"kubernetes.io/projected/e2f2b780-a19d-4581-92f4-ca25c69a263c-kube-api-access-4p76x\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216531 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zjbg\" (UniqueName: \"kubernetes.io/projected/38849139-d385-42a4-adab-687566065973-kube-api-access-2zjbg\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216548 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-host-var-lib-cni-bin\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216566 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-cnibin\") pod \"multus-pvmkj\" (UID: 
\"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216579 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-multus-socket-dir-parent\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216594 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-etc-kubernetes\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216610 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-run-netns\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216623 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-run-ovn\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216637 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-multus-cni-dir\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216664 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-system-cni-dir\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216679 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-os-release\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216693 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-kubelet\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216710 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-node-log\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216731 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-host-run-multus-certs\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216774 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/90df9e29-7482-4ab7-84c6-f3029df17a0d-rootfs\") pod \"machine-config-daemon-gh9dg\" (UID: \"90df9e29-7482-4ab7-84c6-f3029df17a0d\") " pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216793 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-etc-openvswitch\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216820 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e2f2b780-a19d-4581-92f4-ca25c69a263c-cni-binary-copy\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216839 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-host-var-lib-kubelet\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216852 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-log-socket\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216866 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-var-lib-openvswitch\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216881 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-run-openvswitch\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216896 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216910 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/38849139-d385-42a4-adab-687566065973-ovnkube-config\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216925 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvtj5\" (UniqueName: \"kubernetes.io/projected/90df9e29-7482-4ab7-84c6-f3029df17a0d-kube-api-access-mvtj5\") pod \"machine-config-daemon-gh9dg\" (UID: \"90df9e29-7482-4ab7-84c6-f3029df17a0d\") " pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216938 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-systemd-units\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216952 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-host-run-k8s-cni-cncf-io\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216967 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/e2f2b780-a19d-4581-92f4-ca25c69a263c-multus-daemon-config\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216981 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-hostroot\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.216994 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-run-ovn-kubernetes\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.217014 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-run-systemd\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.217032 4747 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/90df9e29-7482-4ab7-84c6-f3029df17a0d-mcd-auth-proxy-config\") pod \"machine-config-daemon-gh9dg\" (UID: \"90df9e29-7482-4ab7-84c6-f3029df17a0d\") " pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.217046 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/38849139-d385-42a4-adab-687566065973-env-overrides\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.217059 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/38849139-d385-42a4-adab-687566065973-ovn-node-metrics-cert\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.217074 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-host-var-lib-cni-multus\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.217087 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-slash\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.217102 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-cni-netd\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.217115 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-multus-conf-dir\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.217129 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/90df9e29-7482-4ab7-84c6-f3029df17a0d-proxy-tls\") pod \"machine-config-daemon-gh9dg\" (UID: \"90df9e29-7482-4ab7-84c6-f3029df17a0d\") " pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.245352 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.257011 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.265932 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.280201 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.280785 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.281261 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 
06:16:59.282041 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.282591 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.283181 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.283661 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.285391 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.285947 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.286532 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.287025 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.287508 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.289064 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.289385 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.289569 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.290447 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.290956 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.291805 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.292310 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.292662 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.293565 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.294118 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.294542 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" 
path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.295490 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.296041 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.296833 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.297512 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.297813 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.298270 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.300400 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.300912 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.301935 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.302425 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.302940 4747 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.303047 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.305235 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.305919 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.306798 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" 
Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.307175 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.308638 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" 
path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.309343 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.310296 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.310970 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.311974 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.312438 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.313442 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.314183 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.315405 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.315891 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.315913 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.316893 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.317552 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318090 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-host-run-netns\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318130 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4p76x\" (UniqueName: \"kubernetes.io/projected/e2f2b780-a19d-4581-92f4-ca25c69a263c-kube-api-access-4p76x\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318147 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-cni-bin\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318163 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/38849139-d385-42a4-adab-687566065973-ovnkube-script-lib\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318182 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/57f2f957-14c5-4cef-81ae-b01b1693f15d-cni-binary-copy\") pod 
\"multus-additional-cni-plugins-w5wxn\" (UID: \"57f2f957-14c5-4cef-81ae-b01b1693f15d\") " pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318198 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/57f2f957-14c5-4cef-81ae-b01b1693f15d-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-w5wxn\" (UID: \"57f2f957-14c5-4cef-81ae-b01b1693f15d\") " pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318214 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hml2x\" (UniqueName: \"kubernetes.io/projected/57f2f957-14c5-4cef-81ae-b01b1693f15d-kube-api-access-hml2x\") pod \"multus-additional-cni-plugins-w5wxn\" (UID: \"57f2f957-14c5-4cef-81ae-b01b1693f15d\") " pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318232 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zjbg\" (UniqueName: \"kubernetes.io/projected/38849139-d385-42a4-adab-687566065973-kube-api-access-2zjbg\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318248 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs\") pod \"network-metrics-daemon-4g26h\" (UID: \"d04a872f-a6a7-45d3-aa62-be934b7266c2\") " pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318266 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-host-var-lib-cni-bin\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318281 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-multus-cni-dir\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318294 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-cnibin\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318309 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-multus-socket-dir-parent\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318324 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-etc-kubernetes\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318338 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-run-netns\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318351 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-run-ovn\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318364 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-kubelet\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318381 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318395 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-system-cni-dir\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318423 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-os-release\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318437 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-node-log\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318454 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/57f2f957-14c5-4cef-81ae-b01b1693f15d-os-release\") pod \"multus-additional-cni-plugins-w5wxn\" (UID: \"57f2f957-14c5-4cef-81ae-b01b1693f15d\") " pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318473 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: 
\"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318488 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-host-run-multus-certs\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318502 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/90df9e29-7482-4ab7-84c6-f3029df17a0d-rootfs\") pod \"machine-config-daemon-gh9dg\" (UID: \"90df9e29-7482-4ab7-84c6-f3029df17a0d\") " pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318516 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-etc-openvswitch\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318533 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/57f2f957-14c5-4cef-81ae-b01b1693f15d-tuning-conf-dir\") pod \"multus-additional-cni-plugins-w5wxn\" (UID: \"57f2f957-14c5-4cef-81ae-b01b1693f15d\") " pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318548 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jbwx\" (UniqueName: \"kubernetes.io/projected/d04a872f-a6a7-45d3-aa62-be934b7266c2-kube-api-access-2jbwx\") pod \"network-metrics-daemon-4g26h\" (UID: \"d04a872f-a6a7-45d3-aa62-be934b7266c2\") " pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318565 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318580 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e2f2b780-a19d-4581-92f4-ca25c69a263c-cni-binary-copy\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318595 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-host-var-lib-kubelet\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318608 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-log-socket\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318623 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-var-lib-openvswitch\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318637 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-run-openvswitch\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318653 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318667 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/38849139-d385-42a4-adab-687566065973-ovnkube-config\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318680 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-host-run-k8s-cni-cncf-io\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318694 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/e2f2b780-a19d-4581-92f4-ca25c69a263c-multus-daemon-config\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318708 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvtj5\" (UniqueName: \"kubernetes.io/projected/90df9e29-7482-4ab7-84c6-f3029df17a0d-kube-api-access-mvtj5\") pod \"machine-config-daemon-gh9dg\" (UID: \"90df9e29-7482-4ab7-84c6-f3029df17a0d\") " pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318722 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-systemd-units\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318727 4747 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-multus-cni-dir\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318741 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318775 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-hostroot\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318789 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-run-ovn-kubernetes\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318804 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/57f2f957-14c5-4cef-81ae-b01b1693f15d-cnibin\") pod \"multus-additional-cni-plugins-w5wxn\" (UID: \"57f2f957-14c5-4cef-81ae-b01b1693f15d\") " pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318819 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-run-systemd\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318834 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-host-var-lib-cni-multus\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318849 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/90df9e29-7482-4ab7-84c6-f3029df17a0d-mcd-auth-proxy-config\") pod \"machine-config-daemon-gh9dg\" (UID: \"90df9e29-7482-4ab7-84c6-f3029df17a0d\") " pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318863 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/38849139-d385-42a4-adab-687566065973-env-overrides\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318878 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/38849139-d385-42a4-adab-687566065973-ovn-node-metrics-cert\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318894 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-multus-conf-dir\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318907 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-slash\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318920 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-cni-netd\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318937 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/90df9e29-7482-4ab7-84c6-f3029df17a0d-proxy-tls\") pod \"machine-config-daemon-gh9dg\" (UID: \"90df9e29-7482-4ab7-84c6-f3029df17a0d\") " pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318952 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/57f2f957-14c5-4cef-81ae-b01b1693f15d-system-cni-dir\") pod \"multus-additional-cni-plugins-w5wxn\" (UID: \"57f2f957-14c5-4cef-81ae-b01b1693f15d\") " pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.319006 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-cni-bin\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.319695 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-cnibin\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.319738 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-multus-socket-dir-parent\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.319776 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-etc-kubernetes\") pod 
\"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.319798 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-run-netns\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.319820 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-run-ovn\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.319841 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-kubelet\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: E1001 06:16:59.319903 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 06:16:59 crc kubenswrapper[4747]: E1001 06:16:59.319939 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:00.319926449 +0000 UTC m=+21.729583498 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.320166 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-system-cni-dir\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.320449 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-os-release\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.320472 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-node-log\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: E1001 06:16:59.320535 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 06:16:59 crc kubenswrapper[4747]: E1001 06:16:59.320550 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 06:16:59 crc kubenswrapper[4747]: E1001 06:16:59.320561 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:16:59 crc kubenswrapper[4747]: E1001 06:16:59.320589 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:00.320579536 +0000 UTC m=+21.730236665 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.320615 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-host-run-multus-certs\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.320641 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/90df9e29-7482-4ab7-84c6-f3029df17a0d-rootfs\") pod \"machine-config-daemon-gh9dg\" (UID: \"90df9e29-7482-4ab7-84c6-f3029df17a0d\") " pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.320663 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-etc-openvswitch\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: E1001 06:16:59.320706 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 06:16:59 crc kubenswrapper[4747]: E1001 06:16:59.320727 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:00.320720239 +0000 UTC m=+21.730377288 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.321031 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/38849139-d385-42a4-adab-687566065973-ovnkube-script-lib\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.318803 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-host-run-netns\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.321284 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e2f2b780-a19d-4581-92f4-ca25c69a263c-cni-binary-copy\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.321379 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-systemd-units\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.321453 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.321675 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-hostroot\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.321498 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-host-var-lib-cni-bin\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.321509 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-run-ovn-kubernetes\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.321520 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-host-var-lib-kubelet\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc 
kubenswrapper[4747]: I1001 06:16:59.321531 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-log-socket\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: E1001 06:16:59.321545 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 06:16:59 crc kubenswrapper[4747]: E1001 06:16:59.321805 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 06:16:59 crc kubenswrapper[4747]: E1001 06:16:59.321821 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:16:59 crc kubenswrapper[4747]: E1001 06:16:59.321871 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:00.321853848 +0000 UTC m=+21.731510897 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.321561 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-slash\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.321570 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-cni-netd\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.321921 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-host-run-k8s-cni-cncf-io\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.321948 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-host-var-lib-cni-multus\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.321482 4747 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-var-lib-openvswitch\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.322102 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-run-openvswitch\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.322124 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e2f2b780-a19d-4581-92f4-ca25c69a263c-multus-conf-dir\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.322166 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.322389 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/38849139-d385-42a4-adab-687566065973-env-overrides\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.322804 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/e2f2b780-a19d-4581-92f4-ca25c69a263c-multus-daemon-config\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.322816 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/38849139-d385-42a4-adab-687566065973-ovnkube-config\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.323015 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/90df9e29-7482-4ab7-84c6-f3029df17a0d-mcd-auth-proxy-config\") pod \"machine-config-daemon-gh9dg\" (UID: \"90df9e29-7482-4ab7-84c6-f3029df17a0d\") " pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.323066 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-run-systemd\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.323350 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.323956 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.324582 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.325492 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/90df9e29-7482-4ab7-84c6-f3029df17a0d-proxy-tls\") pod \"machine-config-daemon-gh9dg\" (UID: \"90df9e29-7482-4ab7-84c6-f3029df17a0d\") " pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.325702 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Oct 01 06:16:59 crc 
kubenswrapper[4747]: I1001 06:16:59.326565 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.327856 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.328503 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.333014 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/38849139-d385-42a4-adab-687566065973-ovn-node-metrics-cert\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.335412 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.338514 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zjbg\" (UniqueName: \"kubernetes.io/projected/38849139-d385-42a4-adab-687566065973-kube-api-access-2zjbg\") pod \"ovnkube-node-p48hw\" (UID: \"38849139-d385-42a4-adab-687566065973\") " pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.339442 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4p76x\" (UniqueName: \"kubernetes.io/projected/e2f2b780-a19d-4581-92f4-ca25c69a263c-kube-api-access-4p76x\") pod \"multus-pvmkj\" (UID: \"e2f2b780-a19d-4581-92f4-ca25c69a263c\") " pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.339815 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-mvtj5\" (UniqueName: \"kubernetes.io/projected/90df9e29-7482-4ab7-84c6-f3029df17a0d-kube-api-access-mvtj5\") pod \"machine-config-daemon-gh9dg\" (UID: \"90df9e29-7482-4ab7-84c6-f3029df17a0d\") " pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.351075 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.370216 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.379557 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.388665 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.399125 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.404055 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-mpx8t" event={"ID":"d323e332-ebe0-4a35-a811-f484557e7d7a","Type":"ContainerStarted","Data":"eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609"} Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.404231 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-mpx8t" event={"ID":"d323e332-ebe0-4a35-a811-f484557e7d7a","Type":"ContainerStarted","Data":"d5e1bf4e4d3a5317fbaded7ffc1582fc2f997c6d709ca0c26d3abe2cd818b81d"} Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.405143 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"7dfa4ddcc597b0ce62258bc7affd8fd2b525f3bf96033643106b00da91723e69"} Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.406995 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589"} Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.407121 4747 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a"} Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.407194 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"3047f71287349821cdb1b31c73572cd8b78b8077e67ceaee47681b7aea69d96f"} Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.408856 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced"} Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.408899 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"4c97d038dadf75b32f4e22fe39a547b8249b768491ff4d67d882dd29e527412b"} Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.414352 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.416609 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.419514 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/57f2f957-14c5-4cef-81ae-b01b1693f15d-cnibin\") pod \"multus-additional-cni-plugins-w5wxn\" (UID: \"57f2f957-14c5-4cef-81ae-b01b1693f15d\") " pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.419714 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/57f2f957-14c5-4cef-81ae-b01b1693f15d-system-cni-dir\") pod \"multus-additional-cni-plugins-w5wxn\" (UID: \"57f2f957-14c5-4cef-81ae-b01b1693f15d\") " pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.419923 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/57f2f957-14c5-4cef-81ae-b01b1693f15d-cni-binary-copy\") pod \"multus-additional-cni-plugins-w5wxn\" (UID: \"57f2f957-14c5-4cef-81ae-b01b1693f15d\") " pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.420505 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/57f2f957-14c5-4cef-81ae-b01b1693f15d-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-w5wxn\" (UID: \"57f2f957-14c5-4cef-81ae-b01b1693f15d\") " pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.421168 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hml2x\" (UniqueName: \"kubernetes.io/projected/57f2f957-14c5-4cef-81ae-b01b1693f15d-kube-api-access-hml2x\") pod \"multus-additional-cni-plugins-w5wxn\" (UID: \"57f2f957-14c5-4cef-81ae-b01b1693f15d\") " pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.421532 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs\") pod \"network-metrics-daemon-4g26h\" (UID: \"d04a872f-a6a7-45d3-aa62-be934b7266c2\") " pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.422240 4747 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/57f2f957-14c5-4cef-81ae-b01b1693f15d-os-release\") pod \"multus-additional-cni-plugins-w5wxn\" (UID: \"57f2f957-14c5-4cef-81ae-b01b1693f15d\") " pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.422329 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/57f2f957-14c5-4cef-81ae-b01b1693f15d-tuning-conf-dir\") pod \"multus-additional-cni-plugins-w5wxn\" (UID: \"57f2f957-14c5-4cef-81ae-b01b1693f15d\") " pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.422356 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jbwx\" (UniqueName: \"kubernetes.io/projected/d04a872f-a6a7-45d3-aa62-be934b7266c2-kube-api-access-2jbwx\") pod \"network-metrics-daemon-4g26h\" (UID: \"d04a872f-a6a7-45d3-aa62-be934b7266c2\") " pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:16:59 crc kubenswrapper[4747]: E1001 06:16:59.421647 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 06:16:59 crc kubenswrapper[4747]: E1001 06:16:59.422496 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs podName:d04a872f-a6a7-45d3-aa62-be934b7266c2 nodeName:}" failed. No retries permitted until 2025-10-01 06:16:59.922474656 +0000 UTC m=+21.332131695 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs") pod "network-metrics-daemon-4g26h" (UID: "d04a872f-a6a7-45d3-aa62-be934b7266c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.419677 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/57f2f957-14c5-4cef-81ae-b01b1693f15d-cnibin\") pod \"multus-additional-cni-plugins-w5wxn\" (UID: \"57f2f957-14c5-4cef-81ae-b01b1693f15d\") " pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.419791 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/57f2f957-14c5-4cef-81ae-b01b1693f15d-system-cni-dir\") pod \"multus-additional-cni-plugins-w5wxn\" (UID: \"57f2f957-14c5-4cef-81ae-b01b1693f15d\") " pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.422873 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/57f2f957-14c5-4cef-81ae-b01b1693f15d-os-release\") pod \"multus-additional-cni-plugins-w5wxn\" (UID: \"57f2f957-14c5-4cef-81ae-b01b1693f15d\") " pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.422010 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/57f2f957-14c5-4cef-81ae-b01b1693f15d-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-w5wxn\" (UID: 
\"57f2f957-14c5-4cef-81ae-b01b1693f15d\") " pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.421533 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/57f2f957-14c5-4cef-81ae-b01b1693f15d-cni-binary-copy\") pod \"multus-additional-cni-plugins-w5wxn\" (UID: \"57f2f957-14c5-4cef-81ae-b01b1693f15d\") " pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.423334 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/57f2f957-14c5-4cef-81ae-b01b1693f15d-tuning-conf-dir\") pod \"multus-additional-cni-plugins-w5wxn\" (UID: \"57f2f957-14c5-4cef-81ae-b01b1693f15d\") " pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.431036 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovn
kube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.437380 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hml2x\" (UniqueName: \"kubernetes.io/projected/57f2f957-14c5-4cef-81ae-b01b1693f15d-kube-api-access-hml2x\") pod \"multus-additional-cni-plugins-w5wxn\" (UID: \"57f2f957-14c5-4cef-81ae-b01b1693f15d\") " pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.440239 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jbwx\" (UniqueName: \"kubernetes.io/projected/d04a872f-a6a7-45d3-aa62-be934b7266c2-kube-api-access-2jbwx\") pod \"network-metrics-daemon-4g26h\" (UID: \"d04a872f-a6a7-45d3-aa62-be934b7266c2\") " pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.446834 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.449665 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.456597 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-pvmkj" Oct 01 06:16:59 crc kubenswrapper[4747]: W1001 06:16:59.458569 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod90df9e29_7482_4ab7_84c6_f3029df17a0d.slice/crio-3c18b150980de7ad90b0a3276d42eb20ee025aec91e0978afac5f5d981cf0a6f WatchSource:0}: Error finding container 3c18b150980de7ad90b0a3276d42eb20ee025aec91e0978afac5f5d981cf0a6f: Status 404 returned error can't find the container with id 3c18b150980de7ad90b0a3276d42eb20ee025aec91e0978afac5f5d981cf0a6f Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.464483 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.464410 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: W1001 06:16:59.469020 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode2f2b780_a19d_4581_92f4_ca25c69a263c.slice/crio-a68f9dfd35be09c0a7c70e1b2397ae15dd28cdbabb1d664005673ec6d0c48ca2 WatchSource:0}: Error finding container a68f9dfd35be09c0a7c70e1b2397ae15dd28cdbabb1d664005673ec6d0c48ca2: Status 404 returned error can't find the container with id a68f9dfd35be09c0a7c70e1b2397ae15dd28cdbabb1d664005673ec6d0c48ca2 Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.472634 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" Oct 01 06:16:59 crc kubenswrapper[4747]: W1001 06:16:59.485652 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod38849139_d385_42a4_adab_687566065973.slice/crio-c15abffcb32aae9b0c99d930b994d794778fa732156bdf5595f6462cbcbea056 WatchSource:0}: Error finding container c15abffcb32aae9b0c99d930b994d794778fa732156bdf5595f6462cbcbea056: Status 404 returned error can't find the container with id c15abffcb32aae9b0c99d930b994d794778fa732156bdf5595f6462cbcbea056 Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.486621 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.497164 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.511771 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.526806 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container 
could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.537636 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.577340 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.623291 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceac
count\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:16:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.661030 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:16:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.697662 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:16:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.743408 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:16:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.782247 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:16:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.832735 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:16:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.861304 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"20
25-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"pod
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:16:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.906305 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:16:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.927661 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs\") pod \"network-metrics-daemon-4g26h\" (UID: \"d04a872f-a6a7-45d3-aa62-be934b7266c2\") " pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:16:59 crc kubenswrapper[4747]: E1001 06:16:59.927864 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 06:16:59 crc kubenswrapper[4747]: E1001 06:16:59.928120 4747 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs podName:d04a872f-a6a7-45d3-aa62-be934b7266c2 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:00.928097889 +0000 UTC m=+22.337754948 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs") pod "network-metrics-daemon-4g26h" (UID: "d04a872f-a6a7-45d3-aa62-be934b7266c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.941008 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:16:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:16:59 crc kubenswrapper[4747]: I1001 06:16:59.979101 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:16:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.024685 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.063499 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.102996 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.145808 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.180619 4747 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.219201 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.230787 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:17:00 crc kubenswrapper[4747]: E1001 06:17:00.231161 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:17:02.231118342 +0000 UTC m=+23.640775401 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.276701 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.276736 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.276805 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:00 crc kubenswrapper[4747]: E1001 06:17:00.276953 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:00 crc kubenswrapper[4747]: E1001 06:17:00.277132 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:00 crc kubenswrapper[4747]: E1001 06:17:00.277292 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.284084 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.332413 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.332453 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.332471 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.332494 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:00 crc kubenswrapper[4747]: E1001 06:17:00.332603 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 06:17:00 crc kubenswrapper[4747]: E1001 06:17:00.332621 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 06:17:00 crc kubenswrapper[4747]: E1001 06:17:00.332620 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 06:17:00 crc kubenswrapper[4747]: E1001 06:17:00.332663 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 06:17:00 crc kubenswrapper[4747]: E1001 06:17:00.332708 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-10-01 06:17:02.332690664 +0000 UTC m=+23.742347713 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 06:17:00 crc kubenswrapper[4747]: E1001 06:17:00.332632 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:17:00 crc kubenswrapper[4747]: E1001 06:17:00.332739 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:02.332720595 +0000 UTC m=+23.742377644 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 06:17:00 crc kubenswrapper[4747]: E1001 06:17:00.332807 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 06:17:00 crc kubenswrapper[4747]: E1001 06:17:00.332858 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 06:17:00 crc kubenswrapper[4747]: E1001 06:17:00.332875 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:17:00 crc kubenswrapper[4747]: E1001 06:17:00.332830 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:02.332812017 +0000 UTC m=+23.742469066 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:17:00 crc kubenswrapper[4747]: E1001 06:17:00.332962 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-10-01 06:17:02.33293964 +0000 UTC m=+23.742596689 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.412793 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" event={"ID":"90df9e29-7482-4ab7-84c6-f3029df17a0d","Type":"ContainerStarted","Data":"21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935"} Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.412840 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" event={"ID":"90df9e29-7482-4ab7-84c6-f3029df17a0d","Type":"ContainerStarted","Data":"f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721"} Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.412852 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" event={"ID":"90df9e29-7482-4ab7-84c6-f3029df17a0d","Type":"ContainerStarted","Data":"3c18b150980de7ad90b0a3276d42eb20ee025aec91e0978afac5f5d981cf0a6f"} Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.414302 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pvmkj" event={"ID":"e2f2b780-a19d-4581-92f4-ca25c69a263c","Type":"ContainerStarted","Data":"2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d"} Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.414327 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pvmkj" event={"ID":"e2f2b780-a19d-4581-92f4-ca25c69a263c","Type":"ContainerStarted","Data":"a68f9dfd35be09c0a7c70e1b2397ae15dd28cdbabb1d664005673ec6d0c48ca2"} Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.416322 4747 generic.go:334] "Generic (PLEG): container finished" podID="57f2f957-14c5-4cef-81ae-b01b1693f15d" containerID="cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058" exitCode=0 Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.416426 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" event={"ID":"57f2f957-14c5-4cef-81ae-b01b1693f15d","Type":"ContainerDied","Data":"cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058"} Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.416466 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" event={"ID":"57f2f957-14c5-4cef-81ae-b01b1693f15d","Type":"ContainerStarted","Data":"54f31857ece80c9ce60f847e70c17f0e6c0aabf36287b2c2ea7400b08d57609b"} Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.418203 4747 generic.go:334] "Generic (PLEG): container finished" podID="38849139-d385-42a4-adab-687566065973" containerID="707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d" exitCode=0 Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.418474 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" 
event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerDied","Data":"707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d"} Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.418542 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerStarted","Data":"c15abffcb32aae9b0c99d930b994d794778fa732156bdf5595f6462cbcbea056"} Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.435196 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.457496 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\
":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\
\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube
-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.473321 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.487356 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.500556 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.512965 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.542458 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.585814 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.624183 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting
\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.665188 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.702863 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.742801 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\
"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.784575 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.828361 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z 
is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.862328 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.905164 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.938188 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs\") pod \"network-metrics-daemon-4g26h\" (UID: \"d04a872f-a6a7-45d3-aa62-be934b7266c2\") " pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:00 crc kubenswrapper[4747]: E1001 06:17:00.938293 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 06:17:00 crc kubenswrapper[4747]: E1001 06:17:00.938342 4747 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs podName:d04a872f-a6a7-45d3-aa62-be934b7266c2 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:02.93832841 +0000 UTC m=+24.347985459 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs") pod "network-metrics-daemon-4g26h" (UID: "d04a872f-a6a7-45d3-aa62-be934b7266c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.944372 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:00 crc kubenswrapper[4747]: I1001 06:17:00.981422 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:00Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.025430 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.065541 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.103302 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.142553 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.182064 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc 
kubenswrapper[4747]: I1001 06:17:01.220301 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.261415 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.276231 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:01 crc kubenswrapper[4747]: E1001 06:17:01.276449 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.301083 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}
],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.424146 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189"} Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.427096 4747 generic.go:334] "Generic (PLEG): container finished" podID="57f2f957-14c5-4cef-81ae-b01b1693f15d" containerID="1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa" exitCode=0 Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.427147 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" event={"ID":"57f2f957-14c5-4cef-81ae-b01b1693f15d","Type":"ContainerDied","Data":"1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa"} Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.431617 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerStarted","Data":"1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4"} Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.431648 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerStarted","Data":"a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537"} Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.431663 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerStarted","Data":"095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d"} Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.431672 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerStarted","Data":"101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4"} Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.448068 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.462081 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.477704 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.492731 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.507049 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.539136 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.585279 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.622537 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.660099 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.679217 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.694281 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.701842 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kub
ernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.723637 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.760323 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.800592 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.845311 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z 
is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.882247 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.910624 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-4zq78"] Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.911268 4747 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-4zq78" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.928439 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:01Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.932769 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.952669 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.972989 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 01 06:17:01 crc kubenswrapper[4747]: I1001 06:17:01.992532 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.041803 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.049261 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/67f55a21-76c4-4456-af5e-3f7f9c2b939d-serviceca\") pod \"node-ca-4zq78\" (UID: \"67f55a21-76c4-4456-af5e-3f7f9c2b939d\") " pod="openshift-image-registry/node-ca-4zq78" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.049341 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2rzz\" (UniqueName: \"kubernetes.io/projected/67f55a21-76c4-4456-af5e-3f7f9c2b939d-kube-api-access-r2rzz\") pod 
\"node-ca-4zq78\" (UID: \"67f55a21-76c4-4456-af5e-3f7f9c2b939d\") " pod="openshift-image-registry/node-ca-4zq78" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.049414 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/67f55a21-76c4-4456-af5e-3f7f9c2b939d-host\") pod \"node-ca-4zq78\" (UID: \"67f55a21-76c4-4456-af5e-3f7f9c2b939d\") " pod="openshift-image-registry/node-ca-4zq78" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.103643 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a673
14731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.125057 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.150892 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/67f55a21-76c4-4456-af5e-3f7f9c2b939d-serviceca\") pod \"node-ca-4zq78\" (UID: \"67f55a21-76c4-4456-af5e-3f7f9c2b939d\") " pod="openshift-image-registry/node-ca-4zq78" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.150963 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2rzz\" (UniqueName: \"kubernetes.io/projected/67f55a21-76c4-4456-af5e-3f7f9c2b939d-kube-api-access-r2rzz\") pod \"node-ca-4zq78\" (UID: \"67f55a21-76c4-4456-af5e-3f7f9c2b939d\") " pod="openshift-image-registry/node-ca-4zq78" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.151025 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/67f55a21-76c4-4456-af5e-3f7f9c2b939d-host\") pod \"node-ca-4zq78\" (UID: \"67f55a21-76c4-4456-af5e-3f7f9c2b939d\") " pod="openshift-image-registry/node-ca-4zq78" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.151121 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/67f55a21-76c4-4456-af5e-3f7f9c2b939d-host\") pod \"node-ca-4zq78\" (UID: \"67f55a21-76c4-4456-af5e-3f7f9c2b939d\") " 
pod="openshift-image-registry/node-ca-4zq78" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.153441 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/67f55a21-76c4-4456-af5e-3f7f9c2b939d-serviceca\") pod \"node-ca-4zq78\" (UID: \"67f55a21-76c4-4456-af5e-3f7f9c2b939d\") " pod="openshift-image-registry/node-ca-4zq78" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.167330 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.198086 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2rzz\" (UniqueName: \"kubernetes.io/projected/67f55a21-76c4-4456-af5e-3f7f9c2b939d-kube-api-access-r2rzz\") pod \"node-ca-4zq78\" (UID: \"67f55a21-76c4-4456-af5e-3f7f9c2b939d\") " pod="openshift-image-registry/node-ca-4zq78" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.222872 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-4zq78" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.223122 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: W1001 06:17:02.238369 4747 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod67f55a21_76c4_4456_af5e_3f7f9c2b939d.slice/crio-92b648547f920f3d497a7f0df932b3daf12d6c8ed6808b442ee7022d10e3a194 WatchSource:0}: Error finding container 92b648547f920f3d497a7f0df932b3daf12d6c8ed6808b442ee7022d10e3a194: Status 404 returned error can't find the container with id 92b648547f920f3d497a7f0df932b3daf12d6c8ed6808b442ee7022d10e3a194 Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.251987 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:17:02 crc kubenswrapper[4747]: E1001 06:17:02.252230 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:17:06.252212971 +0000 UTC m=+27.661870030 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.266615 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.275660 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.275660 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:02 crc kubenswrapper[4747]: E1001 06:17:02.275821 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.275841 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:02 crc kubenswrapper[4747]: E1001 06:17:02.275911 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:02 crc kubenswrapper[4747]: E1001 06:17:02.276077 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.305504 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.344604 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.352725 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.352789 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.352823 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.352855 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:02 crc kubenswrapper[4747]: E1001 06:17:02.352935 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 06:17:02 crc kubenswrapper[4747]: E1001 06:17:02.352982 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 06:17:02 crc kubenswrapper[4747]: E1001 06:17:02.353004 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 06:17:02 crc kubenswrapper[4747]: E1001 06:17:02.353017 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:17:02 crc kubenswrapper[4747]: E1001 06:17:02.353019 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:06.353002584 +0000 UTC m=+27.762659633 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 06:17:02 crc kubenswrapper[4747]: E1001 06:17:02.353079 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:06.353061225 +0000 UTC m=+27.762718394 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:17:02 crc kubenswrapper[4747]: E1001 06:17:02.353199 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 06:17:02 crc kubenswrapper[4747]: E1001 06:17:02.353280 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:06.35326286 +0000 UTC m=+27.762919909 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 06:17:02 crc kubenswrapper[4747]: E1001 06:17:02.353510 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 06:17:02 crc kubenswrapper[4747]: E1001 06:17:02.353657 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 06:17:02 crc kubenswrapper[4747]: E1001 06:17:02.353680 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:17:02 crc kubenswrapper[4747]: E1001 06:17:02.353858 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:06.353835124 +0000 UTC m=+27.763492183 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.383585 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-
01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.419776 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.442578 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerStarted","Data":"21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e"} Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.442625 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerStarted","Data":"a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5"} Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.444857 4747 generic.go:334] "Generic (PLEG): container finished" podID="57f2f957-14c5-4cef-81ae-b01b1693f15d" containerID="75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be" exitCode=0 Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.444959 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" event={"ID":"57f2f957-14c5-4cef-81ae-b01b1693f15d","Type":"ContainerDied","Data":"75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be"} Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.446004 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-4zq78" event={"ID":"67f55a21-76c4-4456-af5e-3f7f9c2b939d","Type":"ContainerStarted","Data":"92b648547f920f3d497a7f0df932b3daf12d6c8ed6808b442ee7022d10e3a194"} Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.463181 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: E1001 06:17:02.481427 4747 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"etcd-crc\" already exists" pod="openshift-etcd/etcd-crc" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.520044 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.562779 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 
2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.606936 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\
\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.641588 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571
e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\
\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.694544 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runni
ng\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce
9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.723827 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.761879 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.801370 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.843982 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.882712 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.925488 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.958947 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs\") pod \"network-metrics-daemon-4g26h\" (UID: \"d04a872f-a6a7-45d3-aa62-be934b7266c2\") " pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:02 crc kubenswrapper[4747]: E1001 06:17:02.959115 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 06:17:02 crc kubenswrapper[4747]: E1001 06:17:02.959169 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs podName:d04a872f-a6a7-45d3-aa62-be934b7266c2 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:06.959151633 +0000 UTC m=+28.368808682 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs") pod "network-metrics-daemon-4g26h" (UID: "d04a872f-a6a7-45d3-aa62-be934b7266c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 06:17:02 crc kubenswrapper[4747]: I1001 06:17:02.968506 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:02Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.008424 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 
crc kubenswrapper[4747]: I1001 06:17:03.050456 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.080249 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.124271 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.276000 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:03 crc kubenswrapper[4747]: E1001 06:17:03.276218 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.452332 4747 generic.go:334] "Generic (PLEG): container finished" podID="57f2f957-14c5-4cef-81ae-b01b1693f15d" containerID="4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538" exitCode=0 Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.452547 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" event={"ID":"57f2f957-14c5-4cef-81ae-b01b1693f15d","Type":"ContainerDied","Data":"4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538"} Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.455296 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-4zq78" event={"ID":"67f55a21-76c4-4456-af5e-3f7f9c2b939d","Type":"ContainerStarted","Data":"f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f"} Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.470202 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.487201 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.507485 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde
7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.521693 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"message\\\":\\\"containers with 
unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.536712 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.549602 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.566814 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.581284 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.596491 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.620185 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z 
is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.643533 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.656347 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.668419 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.678952 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.726864 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.761831 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.806504 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.846287 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.883506 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 
2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.928788 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\
\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:03 crc kubenswrapper[4747]: I1001 06:17:03.966296 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571
e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\
\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:03Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.026192 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runni
ng\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce
9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.045898 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.084505 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.125067 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.173815 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.208455 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.245510 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.275970 4747 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.276030 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:04 crc kubenswrapper[4747]: E1001 06:17:04.276161 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:04 crc kubenswrapper[4747]: E1001 06:17:04.276232 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.276249 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:04 crc kubenswrapper[4747]: E1001 06:17:04.276463 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.292812 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{
\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941f
a87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.323857 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.466220 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerStarted","Data":"90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776"} Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.469482 4747 generic.go:334] "Generic (PLEG): container finished" podID="57f2f957-14c5-4cef-81ae-b01b1693f15d" containerID="4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86" exitCode=0 Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.469556 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" event={"ID":"57f2f957-14c5-4cef-81ae-b01b1693f15d","Type":"ContainerDied","Data":"4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86"} Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.487662 4747 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.505346 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.522863 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa
0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\
\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.537155 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.554506 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.567955 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.605958 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\"
:\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.644698 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.682692 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.735930 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z 
is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.770655 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.805695 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.844333 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.884619 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.929113 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:04Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:04 crc kubenswrapper[4747]: I1001 06:17:04.998837 4747 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.000837 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.000886 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.000904 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 
06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.001070 4747 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.013405 4747 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.013860 4747 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.015481 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.015697 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.015934 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.016119 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.016286 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:05Z","lastTransitionTime":"2025-10-01T06:17:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:05 crc kubenswrapper[4747]: E1001 06:17:05.035855 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:05Z is after 
2025-08-24T17:21:41Z" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.040902 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.041113 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.041291 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.041443 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.041577 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:05Z","lastTransitionTime":"2025-10-01T06:17:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:05 crc kubenswrapper[4747]: E1001 06:17:05.061939 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:05Z is after 
2025-08-24T17:21:41Z" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.066918 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.066968 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.066984 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.067005 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.067026 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:05Z","lastTransitionTime":"2025-10-01T06:17:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:05 crc kubenswrapper[4747]: E1001 06:17:05.088424 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:05Z is after 
2025-08-24T17:21:41Z" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.093221 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.093292 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.093312 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.093335 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.093352 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:05Z","lastTransitionTime":"2025-10-01T06:17:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:05 crc kubenswrapper[4747]: E1001 06:17:05.115623 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:05Z is after 
2025-08-24T17:21:41Z" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.122199 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.122270 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.122294 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.122327 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.122350 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:05Z","lastTransitionTime":"2025-10-01T06:17:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:05 crc kubenswrapper[4747]: E1001 06:17:05.143054 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:05Z is after 
2025-08-24T17:21:41Z" Oct 01 06:17:05 crc kubenswrapper[4747]: E1001 06:17:05.143264 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.145160 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.145188 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.145199 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.145216 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.145228 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:05Z","lastTransitionTime":"2025-10-01T06:17:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.247880 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.247919 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.247930 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.247947 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.247959 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:05Z","lastTransitionTime":"2025-10-01T06:17:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.275904 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:05 crc kubenswrapper[4747]: E1001 06:17:05.276150 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.351135 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.351208 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.351230 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.351608 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.351629 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:05Z","lastTransitionTime":"2025-10-01T06:17:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.454533 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.454605 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.454628 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.454660 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.454683 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:05Z","lastTransitionTime":"2025-10-01T06:17:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.478325 4747 generic.go:334] "Generic (PLEG): container finished" podID="57f2f957-14c5-4cef-81ae-b01b1693f15d" containerID="5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd" exitCode=0 Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.478393 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" event={"ID":"57f2f957-14c5-4cef-81ae-b01b1693f15d","Type":"ContainerDied","Data":"5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd"} Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.499627 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:05Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.521678 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:05Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.541678 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:05Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.557671 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.557741 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.557767 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.557788 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.557842 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:05Z","lastTransitionTime":"2025-10-01T06:17:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.561090 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:05Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.594424 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:05Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.617170 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:05Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.631233 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:05Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.647039 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:05Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.663209 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.663278 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.663304 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.663333 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 
06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.663352 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:05Z","lastTransitionTime":"2025-10-01T06:17:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.667872 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-
dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e491
17b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:05Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.684167 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:05Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.699954 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:05Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.712457 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:05Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.727978 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:05Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.745118 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:05Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.757172 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:05Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.767106 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.767164 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.767182 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.767203 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.767221 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:05Z","lastTransitionTime":"2025-10-01T06:17:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.869853 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.869916 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.869939 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.869968 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.869990 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:05Z","lastTransitionTime":"2025-10-01T06:17:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.972795 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.972845 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.972856 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.972873 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.972883 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:05Z","lastTransitionTime":"2025-10-01T06:17:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.981067 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.988475 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 01 06:17:05 crc kubenswrapper[4747]: I1001 06:17:05.995028 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.007977 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.039952 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0
ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.059343 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.074471 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.077366 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.077433 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.077462 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.077494 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.077517 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:06Z","lastTransitionTime":"2025-10-01T06:17:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.088897 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.109747 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.128653 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.145373 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.170435 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a168
8df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"
/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.180508 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.180589 4747 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.180630 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.180651 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.180668 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:06Z","lastTransitionTime":"2025-10-01T06:17:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.185661 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.207298 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.226242 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.246903 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.267723 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 
2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.276185 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.276255 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:06 crc kubenswrapper[4747]: E1001 06:17:06.276351 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:06 crc kubenswrapper[4747]: E1001 06:17:06.276518 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.277241 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:06 crc kubenswrapper[4747]: E1001 06:17:06.277579 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.284628 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.284690 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.284930 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.284967 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.285052 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:06Z","lastTransitionTime":"2025-10-01T06:17:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.298072 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091
dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.300123 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:17:06 crc kubenswrapper[4747]: E1001 06:17:06.300304 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:17:14.300269426 +0000 UTC m=+35.709926535 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.319072 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc4782
74c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc 
kubenswrapper[4747]: I1001 06:17:06.350848 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\
\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Co
mpleted\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.373361 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.388154 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.388233 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.388259 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.388292 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.388315 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:06Z","lastTransitionTime":"2025-10-01T06:17:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.390336 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.401430 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.401504 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.401573 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: 
\"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.401600 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:06 crc kubenswrapper[4747]: E1001 06:17:06.401644 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 06:17:06 crc kubenswrapper[4747]: E1001 06:17:06.401715 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:14.401694783 +0000 UTC m=+35.811351842 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 06:17:06 crc kubenswrapper[4747]: E1001 06:17:06.401777 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 06:17:06 crc kubenswrapper[4747]: E1001 06:17:06.401796 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 06:17:06 crc kubenswrapper[4747]: E1001 06:17:06.401809 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:17:06 crc kubenswrapper[4747]: E1001 06:17:06.401814 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 06:17:06 crc kubenswrapper[4747]: E1001 06:17:06.401851 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 06:17:06 crc kubenswrapper[4747]: E1001 06:17:06.401871 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:17:06 crc kubenswrapper[4747]: E1001 06:17:06.401884 4747 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:14.401869908 +0000 UTC m=+35.811526967 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:17:06 crc kubenswrapper[4747]: E1001 06:17:06.401932 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:14.401911099 +0000 UTC m=+35.811568198 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:17:06 crc kubenswrapper[4747]: E1001 06:17:06.401936 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 06:17:06 crc kubenswrapper[4747]: E1001 06:17:06.402034 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:14.402006562 +0000 UTC m=+35.811663671 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.412037 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.432133 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.468270 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.485517 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" event={"ID":"57f2f957-14c5-4cef-81ae-b01b1693f15d","Type":"ContainerStarted","Data":"a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a"} Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.494631 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.494679 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.494692 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.494712 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.494725 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:06Z","lastTransitionTime":"2025-10-01T06:17:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.495970 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerStarted","Data":"a9ac0848c192cd6f05c4660f89d4b94f13826fb9a1d74bd8d27b495f6d2b9d71"} Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.496491 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.513841 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.534820 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.543712 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.588627 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a168
8df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"
/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.598071 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.598122 4747 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.598134 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.598151 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.598161 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:06Z","lastTransitionTime":"2025-10-01T06:17:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.623945 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.662884 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.700604 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.700665 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.700685 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.700709 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.700726 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:06Z","lastTransitionTime":"2025-10-01T06:17:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.710553 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.748352 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.786273 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 
2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.803424 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.803487 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.803505 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.803529 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.803545 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:06Z","lastTransitionTime":"2025-10-01T06:17:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.837919 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z 
is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.868036 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.905944 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.906010 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.906026 4747 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.906049 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.906065 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:06Z","lastTransitionTime":"2025-10-01T06:17:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.917904 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\
"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"l
og-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.947906 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:06 crc kubenswrapper[4747]: I1001 06:17:06.985698 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:06Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.008788 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.008836 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.008849 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.008866 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.008877 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:07Z","lastTransitionTime":"2025-10-01T06:17:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.014019 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs\") pod \"network-metrics-daemon-4g26h\" (UID: \"d04a872f-a6a7-45d3-aa62-be934b7266c2\") " pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:07 crc kubenswrapper[4747]: E1001 06:17:07.014216 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 06:17:07 crc kubenswrapper[4747]: E1001 06:17:07.014290 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs podName:d04a872f-a6a7-45d3-aa62-be934b7266c2 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:15.014269834 +0000 UTC m=+36.423926953 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs") pod "network-metrics-daemon-4g26h" (UID: "d04a872f-a6a7-45d3-aa62-be934b7266c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.021417 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 
2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.066214 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.106131 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.111007 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.111075 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.111098 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.111129 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.111152 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:07Z","lastTransitionTime":"2025-10-01T06:17:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.144593 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.186862 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 
2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.214383 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.214469 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.214492 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.214528 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.214550 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:07Z","lastTransitionTime":"2025-10-01T06:17:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.224493 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:
01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.268025 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastSt
ate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.276268 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:07 crc kubenswrapper[4747]: E1001 06:17:07.276428 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.301815 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.317236 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.317292 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.317314 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.317337 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.317354 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:07Z","lastTransitionTime":"2025-10-01T06:17:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.343681 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.390408 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.420188 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.420274 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.420298 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.420329 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.420351 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:07Z","lastTransitionTime":"2025-10-01T06:17:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.435855 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9ac0848c192cd6f05c4660f89d4b94f13826fb9
a1d74bd8d27b495f6d2b9d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.468028 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.498015 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.498340 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.522889 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.522945 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.522966 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.522990 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.523008 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:07Z","lastTransitionTime":"2025-10-01T06:17:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.529828 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.550709 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},
\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.569854 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready 
status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.586891 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.626363 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.626419 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.626436 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.626459 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.626476 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:07Z","lastTransitionTime":"2025-10-01T06:17:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.628986 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.670506 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.706994 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.729685 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.729817 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.729843 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.729869 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.729886 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:07Z","lastTransitionTime":"2025-10-01T06:17:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.746400 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.781969 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.822808 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.833272 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.833325 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.833335 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.833352 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.833363 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:07Z","lastTransitionTime":"2025-10-01T06:17:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.872974 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.922714 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9ac0848c192cd6f05c4660f89d4b94f13826fb9
a1d74bd8d27b495f6d2b9d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.935404 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.935441 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.935450 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.935464 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.935473 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:07Z","lastTransitionTime":"2025-10-01T06:17:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.943147 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:07 crc kubenswrapper[4747]: I1001 06:17:07.987291 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0
ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:07Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.024371 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:08Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.037867 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.037905 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.037917 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.037936 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.037950 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:08Z","lastTransitionTime":"2025-10-01T06:17:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.059121 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:08Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.098621 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:08Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.140530 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.140560 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.140568 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.140580 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.140589 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:08Z","lastTransitionTime":"2025-10-01T06:17:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.243565 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.243613 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.243622 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.243636 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.243645 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:08Z","lastTransitionTime":"2025-10-01T06:17:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.275927 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:08 crc kubenswrapper[4747]: E1001 06:17:08.276070 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.276481 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:08 crc kubenswrapper[4747]: E1001 06:17:08.276555 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.276605 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:08 crc kubenswrapper[4747]: E1001 06:17:08.276660 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.346229 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.346263 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.346273 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.346290 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.346304 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:08Z","lastTransitionTime":"2025-10-01T06:17:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.448939 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.448974 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.448982 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.448996 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.449005 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:08Z","lastTransitionTime":"2025-10-01T06:17:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.530189 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.551141 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.551215 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.551234 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.551258 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.551275 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:08Z","lastTransitionTime":"2025-10-01T06:17:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.653918 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.653997 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.654021 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.654049 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.654072 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:08Z","lastTransitionTime":"2025-10-01T06:17:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.757363 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.757417 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.757433 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.757457 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.757474 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:08Z","lastTransitionTime":"2025-10-01T06:17:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.861735 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.861806 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.861824 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.861846 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.861861 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:08Z","lastTransitionTime":"2025-10-01T06:17:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.968598 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.968680 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.968703 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.968733 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:08 crc kubenswrapper[4747]: I1001 06:17:08.968792 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:08Z","lastTransitionTime":"2025-10-01T06:17:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.071411 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.071823 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.071843 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.071866 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.071883 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:09Z","lastTransitionTime":"2025-10-01T06:17:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.174372 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.174427 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.174449 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.174476 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.174495 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:09Z","lastTransitionTime":"2025-10-01T06:17:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.276209 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:09 crc kubenswrapper[4747]: E1001 06:17:09.276402 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.277539 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.277594 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.277616 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.277643 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.277682 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:09Z","lastTransitionTime":"2025-10-01T06:17:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.297626 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.331103 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9ac0848c192cd6f05c4660f89d4b94f13826fb9
a1d74bd8d27b495f6d2b9d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.352093 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.370735 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.380402 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.380460 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.380478 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.380505 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.380522 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:09Z","lastTransitionTime":"2025-10-01T06:17:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.400994 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.440292 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0
ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.462344 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.483671 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.483928 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.483947 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.484080 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.484099 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:09Z","lastTransitionTime":"2025-10-01T06:17:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.486642 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.
126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\
"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":
{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.509045 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.16
8.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.530495 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controlle
r-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.536926 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p48hw_38849139-d385-42a4-adab-687566065973/ovnkube-controller/0.log" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.545336 4747 generic.go:334] "Generic (PLEG): container finished" podID="38849139-d385-42a4-adab-687566065973" containerID="a9ac0848c192cd6f05c4660f89d4b94f13826fb9a1d74bd8d27b495f6d2b9d71" exitCode=1 Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.545420 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerDied","Data":"a9ac0848c192cd6f05c4660f89d4b94f13826fb9a1d74bd8d27b495f6d2b9d71"} Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.547372 4747 scope.go:117] "RemoveContainer" containerID="a9ac0848c192cd6f05c4660f89d4b94f13826fb9a1d74bd8d27b495f6d2b9d71" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.552196 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.569310 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.588776 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.589110 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.589313 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.589514 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.589690 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:09Z","lastTransitionTime":"2025-10-01T06:17:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.590330 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.610814 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.630245 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.653646 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.693519 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.693575 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.693593 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.693618 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.693637 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:09Z","lastTransitionTime":"2025-10-01T06:17:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.700239 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.721417 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.754505 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0
ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.776630 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.797931 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.798237 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.798265 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.798297 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.798321 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:09Z","lastTransitionTime":"2025-10-01T06:17:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.798896 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.818661 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.838678 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.856248 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.872892 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.886806 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.900874 4747 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.900916 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.900968 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.900995 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.901044 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:09Z","lastTransitionTime":"2025-10-01T06:17:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.903649 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.918941 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.931510 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.950690 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.964697 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:09 crc kubenswrapper[4747]: I1001 06:17:09.995470 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\
":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a9ac0848c192cd6f05c4660f89d4b94f13826fb9a1d74bd8d27b495f6d2b9d71\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9ac0848c192cd6f05c4660f89d4b94f13826fb9a1d74bd8d27b495f6d2b9d71\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:09Z\\\",\\\"message\\\":\\\"ient-go/informers/factory.go:160\\\\nI1001 06:17:08.967792 6021 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:08.968363 6021 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:08.968437 6021 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 06:17:08.968501 6021 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1001 06:17:08.968543 6021 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1001 06:17:08.969214 6021 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1001 06:17:08.969258 6021 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1001 06:17:08.969284 6021 factory.go:656] Stopping watch factory\\\\nI1001 06:17:08.969290 6021 
handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1001 06:17:08.969304 6021 ovnkube.go:599] Stopped ovnkube\\\\nI1001 06\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7
bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.004058 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.004124 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.004141 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.004630 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.004695 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:10Z","lastTransitionTime":"2025-10-01T06:17:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.108199 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.108264 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.108281 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.108304 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.108322 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:10Z","lastTransitionTime":"2025-10-01T06:17:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.211088 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.211141 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.211154 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.211172 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.211186 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:10Z","lastTransitionTime":"2025-10-01T06:17:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.276155 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:10 crc kubenswrapper[4747]: E1001 06:17:10.276329 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.276645 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.276686 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:10 crc kubenswrapper[4747]: E1001 06:17:10.276821 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:10 crc kubenswrapper[4747]: E1001 06:17:10.276920 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.313792 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.313857 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.313875 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.313900 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.313919 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:10Z","lastTransitionTime":"2025-10-01T06:17:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.416300 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.416344 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.416355 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.416372 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.416383 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:10Z","lastTransitionTime":"2025-10-01T06:17:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.518857 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.518912 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.518929 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.518954 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.518972 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:10Z","lastTransitionTime":"2025-10-01T06:17:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.552074 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p48hw_38849139-d385-42a4-adab-687566065973/ovnkube-controller/0.log" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.555960 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerStarted","Data":"cd23a28f043341b1854fc99197cd116078cbe5d7578602505a8a7f71bda26222"} Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.556113 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.579524 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.599206 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd23a28f043341b1854fc99197cd116078cbe5d7578602505a8a7f71bda26222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9ac0848c192cd6f05c4660f89d4b94f13826fb9a1d74bd8d27b495f6d2b9d71\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:09Z\\\",\\\"message\\\":\\\"ient-go/informers/factory.go:160\\\\nI1001 06:17:08.967792 6021 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:08.968363 6021 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:08.968437 6021 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 06:17:08.968501 6021 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1001 06:17:08.968543 6021 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1001 06:17:08.969214 6021 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1001 06:17:08.969258 6021 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1001 06:17:08.969284 6021 factory.go:656] Stopping watch factory\\\\nI1001 06:17:08.969290 6021 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1001 06:17:08.969304 6021 ovnkube.go:599] Stopped ovnkube\\\\nI1001 
06\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.619971 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.623143 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.623186 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.623200 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.623219 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.623236 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:10Z","lastTransitionTime":"2025-10-01T06:17:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.634725 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.658424 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.688740 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0
ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.704399 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.725198 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\"
:\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa
016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.725636 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.725695 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.725785 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.725839 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.725866 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:10Z","lastTransitionTime":"2025-10-01T06:17:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.742249 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.
168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.761908 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-control
ler-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.776676 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.793286 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.809235 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.828473 4747 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.828524 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.828539 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.828563 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.828581 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:10Z","lastTransitionTime":"2025-10-01T06:17:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.829463 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.847525 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.870008 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.931311 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.931377 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.931397 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.931422 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:10 crc kubenswrapper[4747]: I1001 06:17:10.931441 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:10Z","lastTransitionTime":"2025-10-01T06:17:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.034674 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.034739 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.034806 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.034829 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.034846 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:11Z","lastTransitionTime":"2025-10-01T06:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.137567 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.137627 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.137643 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.137667 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.137685 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:11Z","lastTransitionTime":"2025-10-01T06:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.239967 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.240017 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.240033 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.240052 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.240067 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:11Z","lastTransitionTime":"2025-10-01T06:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.276816 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:11 crc kubenswrapper[4747]: E1001 06:17:11.276969 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.342431 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.342486 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.342502 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.342526 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.342543 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:11Z","lastTransitionTime":"2025-10-01T06:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.445493 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.445551 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.445568 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.445591 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.445608 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:11Z","lastTransitionTime":"2025-10-01T06:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.549811 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.549903 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.549930 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.549960 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.549986 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:11Z","lastTransitionTime":"2025-10-01T06:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.562409 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p48hw_38849139-d385-42a4-adab-687566065973/ovnkube-controller/1.log" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.563675 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p48hw_38849139-d385-42a4-adab-687566065973/ovnkube-controller/0.log" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.568081 4747 generic.go:334] "Generic (PLEG): container finished" podID="38849139-d385-42a4-adab-687566065973" containerID="cd23a28f043341b1854fc99197cd116078cbe5d7578602505a8a7f71bda26222" exitCode=1 Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.568135 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerDied","Data":"cd23a28f043341b1854fc99197cd116078cbe5d7578602505a8a7f71bda26222"} Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.568198 4747 scope.go:117] "RemoveContainer" containerID="a9ac0848c192cd6f05c4660f89d4b94f13826fb9a1d74bd8d27b495f6d2b9d71" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.569505 4747 scope.go:117] "RemoveContainer" containerID="cd23a28f043341b1854fc99197cd116078cbe5d7578602505a8a7f71bda26222" Oct 01 06:17:11 crc kubenswrapper[4747]: E1001 06:17:11.569898 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" podUID="38849139-d385-42a4-adab-687566065973" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.604920 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0
ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:11Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.627959 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:11Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.649081 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:11Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.656130 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.656238 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.656258 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.656283 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.656301 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:11Z","lastTransitionTime":"2025-10-01T06:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.668312 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-01T06:17:11Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.691092 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name
\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:11Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.709647 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:11Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.725666 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:11Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.750177 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:11Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.758647 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.758692 4747 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.758702 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.758717 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.758730 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:11Z","lastTransitionTime":"2025-10-01T06:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.767847 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-01T06:17:11Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.787102 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:11Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.807264 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:11Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.824554 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-01T06:17:11Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.846349 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:11Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.861149 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.861193 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.861230 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.861248 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.861260 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:11Z","lastTransitionTime":"2025-10-01T06:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.865564 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:11Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.880797 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:11Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.911970 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd23a28f043341b1854fc99197cd116078cbe5d7
578602505a8a7f71bda26222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9ac0848c192cd6f05c4660f89d4b94f13826fb9a1d74bd8d27b495f6d2b9d71\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:09Z\\\",\\\"message\\\":\\\"ient-go/informers/factory.go:160\\\\nI1001 06:17:08.967792 6021 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:08.968363 6021 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:08.968437 6021 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 06:17:08.968501 6021 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1001 06:17:08.968543 6021 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI1001 06:17:08.969214 6021 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1001 06:17:08.969258 6021 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1001 06:17:08.969284 6021 factory.go:656] Stopping watch factory\\\\nI1001 06:17:08.969290 6021 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1001 06:17:08.969304 6021 ovnkube.go:599] Stopped ovnkube\\\\nI1001 06\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd23a28f043341b1854fc99197cd116078cbe5d7578602505a8a7f71bda26222\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:10Z\\\",\\\"message\\\":\\\": failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z]\\\\nI1001 06:17:10.645254 6159 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/packageserver-service]} name:Service_openshift-operator-lifecycle-manager/packageserver-service_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.153:5443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {5e50827b-d271-442b-b8a7-7f33b2cd6b11}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 06:17:10.645355 6159 lb_config.go:1031] Cluster endpoints for openshift-config-operator/metrics for 
network=default are: map[]\\\\nI1001 06:17:10.644974 6159 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-p48hw\\\\nI1001 06:17:10.645373 6159 services_controller.go:443] Built service op\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:11Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.963636 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.963705 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.963726 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.963789 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:11 crc kubenswrapper[4747]: I1001 06:17:11.963809 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:11Z","lastTransitionTime":"2025-10-01T06:17:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.066988 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.067044 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.067063 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.067089 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.067106 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:12Z","lastTransitionTime":"2025-10-01T06:17:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.169931 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.169997 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.170014 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.170041 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.170070 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:12Z","lastTransitionTime":"2025-10-01T06:17:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.273517 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.273580 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.273597 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.273622 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.273642 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:12Z","lastTransitionTime":"2025-10-01T06:17:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.275738 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.275854 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.275738 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:12 crc kubenswrapper[4747]: E1001 06:17:12.275967 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:12 crc kubenswrapper[4747]: E1001 06:17:12.276205 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:12 crc kubenswrapper[4747]: E1001 06:17:12.276319 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.376841 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.376896 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.376914 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.376938 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.376955 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:12Z","lastTransitionTime":"2025-10-01T06:17:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.480304 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.480373 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.480390 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.480840 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.480873 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:12Z","lastTransitionTime":"2025-10-01T06:17:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.575017 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p48hw_38849139-d385-42a4-adab-687566065973/ovnkube-controller/1.log" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.580826 4747 scope.go:117] "RemoveContainer" containerID="cd23a28f043341b1854fc99197cd116078cbe5d7578602505a8a7f71bda26222" Oct 01 06:17:12 crc kubenswrapper[4747]: E1001 06:17:12.581007 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" podUID="38849139-d385-42a4-adab-687566065973" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.583249 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.583281 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.583291 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.583305 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.583317 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:12Z","lastTransitionTime":"2025-10-01T06:17:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.602563 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.608585 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd"] Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.609073 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" Oct 01 06:17:12 crc kubenswrapper[4747]: W1001 06:17:12.610897 4747 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert": failed to list *v1.Secret: secrets "ovn-control-plane-metrics-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Oct 01 06:17:12 crc kubenswrapper[4747]: E1001 06:17:12.610956 4747 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovn-control-plane-metrics-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"ovn-control-plane-metrics-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.611458 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.626331 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 
06:17:12.642531 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.664122 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7f
a0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name
\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.666626 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/669e0e5d-344c-47cb-87a7-1fa4ffd5e88b-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-lnhpd\" (UID: \"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.666821 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/669e0e5d-344c-47cb-87a7-1fa4ffd5e88b-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-lnhpd\" (UID: \"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.666909 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsxxq\" (UniqueName: \"kubernetes.io/projected/669e0e5d-344c-47cb-87a7-1fa4ffd5e88b-kube-api-access-hsxxq\") pod \"ovnkube-control-plane-749d76644c-lnhpd\" (UID: \"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.667001 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/669e0e5d-344c-47cb-87a7-1fa4ffd5e88b-env-overrides\") pod \"ovnkube-control-plane-749d76644c-lnhpd\" (UID: \"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.680939 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.686660 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.686714 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.686732 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.686787 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.686812 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:12Z","lastTransitionTime":"2025-10-01T06:17:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.697025 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.715596 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.731946 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.752858 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.768255 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/669e0e5d-344c-47cb-87a7-1fa4ffd5e88b-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-lnhpd\" (UID: \"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.768335 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/669e0e5d-344c-47cb-87a7-1fa4ffd5e88b-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-lnhpd\" (UID: \"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.768408 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsxxq\" (UniqueName: \"kubernetes.io/projected/669e0e5d-344c-47cb-87a7-1fa4ffd5e88b-kube-api-access-hsxxq\") pod \"ovnkube-control-plane-749d76644c-lnhpd\" (UID: \"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.768488 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/669e0e5d-344c-47cb-87a7-1fa4ffd5e88b-env-overrides\") pod \"ovnkube-control-plane-749d76644c-lnhpd\" (UID: \"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.769693 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/669e0e5d-344c-47cb-87a7-1fa4ffd5e88b-env-overrides\") pod \"ovnkube-control-plane-749d76644c-lnhpd\" (UID: \"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.770055 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/669e0e5d-344c-47cb-87a7-1fa4ffd5e88b-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-lnhpd\" (UID: \"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.785408 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd23a28f043341b1854fc99197cd116078cbe5d7578602505a8a7f71bda26222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd23a28f043341b1854fc99197cd116078cbe5d7578602505a8a7f71bda26222\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:10Z\\\",\\\"message\\\":\\\": failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z]\\\\nI1001 06:17:10.645254 6159 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/packageserver-service]} name:Service_openshift-operator-lifecycle-manager/packageserver-service_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.153:5443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {5e50827b-d271-442b-b8a7-7f33b2cd6b11}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 06:17:10.645355 6159 lb_config.go:1031] Cluster endpoints for openshift-config-operator/metrics for network=default are: map[]\\\\nI1001 06:17:10.644974 6159 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-p48hw\\\\nI1001 06:17:10.645373 6159 services_controller.go:443] Built service op\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.790136 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.790188 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.790206 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.790231 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.790249 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:12Z","lastTransitionTime":"2025-10-01T06:17:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.794594 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsxxq\" (UniqueName: \"kubernetes.io/projected/669e0e5d-344c-47cb-87a7-1fa4ffd5e88b-kube-api-access-hsxxq\") pod \"ovnkube-control-plane-749d76644c-lnhpd\" (UID: \"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.804576 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.820972 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-1
0-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.847368 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0
ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.862295 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.875894 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.890208 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.892795 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.892840 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.892850 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.892866 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.892876 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:12Z","lastTransitionTime":"2025-10-01T06:17:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.905680 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\
"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.934334 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\
\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":f
alse,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.951278 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.968450 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.980487 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.994823 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.994874 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.994891 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.994916 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:12 crc kubenswrapper[4747]: I1001 06:17:12.994933 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:12Z","lastTransitionTime":"2025-10-01T06:17:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:12.999950 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:12Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.013581 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:13Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.030316 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:13Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.051281 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:13Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.063900 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:13Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.085462 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:13Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.097950 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.098029 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.098046 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.098070 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.100854 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:13Z","lastTransitionTime":"2025-10-01T06:17:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.110446 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:13Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.125481 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:13Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.141012 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:13Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.171862 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10185
7f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd23a28f043341b1854fc99197cd116078cbe5d7578602505a8a7f71bda26222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd23a28f043341b1854fc99197cd116078cbe5d7578602505a8a7f71bda26222\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:10Z\\\",\\\"message\\\":\\\": failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z]\\\\nI1001 06:17:10.645254 6159 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/packageserver-service]} name:Service_openshift-operator-lifecycle-manager/packageserver-service_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.153:5443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {5e50827b-d271-442b-b8a7-7f33b2cd6b11}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 06:17:10.645355 6159 lb_config.go:1031] Cluster endpoints for openshift-config-operator/metrics for network=default are: map[]\\\\nI1001 06:17:10.644974 6159 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-p48hw\\\\nI1001 06:17:10.645373 6159 services_controller.go:443] Built service 
op\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:13Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.187326 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lnhpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:13Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.203279 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.203336 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.203353 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.203377 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.203395 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:13Z","lastTransitionTime":"2025-10-01T06:17:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.206487 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:13Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.276259 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:13 crc kubenswrapper[4747]: E1001 06:17:13.276465 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.305694 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.305799 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.305826 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.305854 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.305873 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:13Z","lastTransitionTime":"2025-10-01T06:17:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.408485 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.408554 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.408572 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.408596 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.408615 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:13Z","lastTransitionTime":"2025-10-01T06:17:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.511259 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.511327 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.511351 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.511382 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.511410 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:13Z","lastTransitionTime":"2025-10-01T06:17:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.615211 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.615557 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.615613 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.615636 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.615654 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:13Z","lastTransitionTime":"2025-10-01T06:17:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.718703 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.718784 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.718802 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.718827 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.718844 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:13Z","lastTransitionTime":"2025-10-01T06:17:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:13 crc kubenswrapper[4747]: E1001 06:17:13.768854 4747 secret.go:188] Couldn't get secret openshift-ovn-kubernetes/ovn-control-plane-metrics-cert: failed to sync secret cache: timed out waiting for the condition Oct 01 06:17:13 crc kubenswrapper[4747]: E1001 06:17:13.768975 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/669e0e5d-344c-47cb-87a7-1fa4ffd5e88b-ovn-control-plane-metrics-cert podName:669e0e5d-344c-47cb-87a7-1fa4ffd5e88b nodeName:}" failed. No retries permitted until 2025-10-01 06:17:14.268948692 +0000 UTC m=+35.678605771 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "ovn-control-plane-metrics-cert" (UniqueName: "kubernetes.io/secret/669e0e5d-344c-47cb-87a7-1fa4ffd5e88b-ovn-control-plane-metrics-cert") pod "ovnkube-control-plane-749d76644c-lnhpd" (UID: "669e0e5d-344c-47cb-87a7-1fa4ffd5e88b") : failed to sync secret cache: timed out waiting for the condition Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.821522 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.821577 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.821593 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.821617 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.821633 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:13Z","lastTransitionTime":"2025-10-01T06:17:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.925030 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.925082 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.925098 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.925120 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:13 crc kubenswrapper[4747]: I1001 06:17:13.925138 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:13Z","lastTransitionTime":"2025-10-01T06:17:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.027980 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.028047 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.028070 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.028100 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.028125 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:14Z","lastTransitionTime":"2025-10-01T06:17:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.131199 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.131250 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.131267 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.131290 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.131307 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:14Z","lastTransitionTime":"2025-10-01T06:17:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.209104 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.234166 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.234235 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.234257 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.234288 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.234310 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:14Z","lastTransitionTime":"2025-10-01T06:17:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.275787 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.275864 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:14 crc kubenswrapper[4747]: E1001 06:17:14.275955 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.275801 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:14 crc kubenswrapper[4747]: E1001 06:17:14.276127 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:14 crc kubenswrapper[4747]: E1001 06:17:14.276352 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.286465 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/669e0e5d-344c-47cb-87a7-1fa4ffd5e88b-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-lnhpd\" (UID: \"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.290902 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/669e0e5d-344c-47cb-87a7-1fa4ffd5e88b-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-lnhpd\" (UID: \"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.337558 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.337624 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.337651 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.337681 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.337704 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:14Z","lastTransitionTime":"2025-10-01T06:17:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.387126 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:17:14 crc kubenswrapper[4747]: E1001 06:17:14.387321 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:17:30.387291977 +0000 UTC m=+51.796949036 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.429871 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.440232 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.440309 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.440335 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.440367 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.440389 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:14Z","lastTransitionTime":"2025-10-01T06:17:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:14 crc kubenswrapper[4747]: W1001 06:17:14.453962 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod669e0e5d_344c_47cb_87a7_1fa4ffd5e88b.slice/crio-419f4f047375115f27a06f35e31c471e710cdeae503d3f25a178f165b462294d WatchSource:0}: Error finding container 419f4f047375115f27a06f35e31c471e710cdeae503d3f25a178f165b462294d: Status 404 returned error can't find the container with id 419f4f047375115f27a06f35e31c471e710cdeae503d3f25a178f165b462294d Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.488163 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.488250 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.488309 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.488378 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:14 crc kubenswrapper[4747]: E1001 06:17:14.488466 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 06:17:14 crc kubenswrapper[4747]: E1001 06:17:14.488580 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:30.488548621 +0000 UTC m=+51.898205710 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Oct 01 06:17:14 crc kubenswrapper[4747]: E1001 06:17:14.488611 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 01 06:17:14 crc kubenswrapper[4747]: E1001 06:17:14.488645 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 01 06:17:14 crc kubenswrapper[4747]: E1001 06:17:14.488668 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 01 06:17:14 crc kubenswrapper[4747]: E1001 06:17:14.488739 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:30.488716145 +0000 UTC m=+51.898373234 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 01 06:17:14 crc kubenswrapper[4747]: E1001 06:17:14.488907 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Oct 01 06:17:14 crc kubenswrapper[4747]: E1001 06:17:14.488938 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Oct 01 06:17:14 crc kubenswrapper[4747]: E1001 06:17:14.488957 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 01 06:17:14 crc kubenswrapper[4747]: E1001 06:17:14.489020 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:30.488997322 +0000 UTC m=+51.898654401 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Oct 01 06:17:14 crc kubenswrapper[4747]: E1001 06:17:14.489093 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Oct 01 06:17:14 crc kubenswrapper[4747]: E1001 06:17:14.489151 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:30.489133105 +0000 UTC m=+51.898790194 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.544526 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.544609 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.544625 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.544644 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.544660 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:14Z","lastTransitionTime":"2025-10-01T06:17:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.592363 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" event={"ID":"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b","Type":"ContainerStarted","Data":"419f4f047375115f27a06f35e31c471e710cdeae503d3f25a178f165b462294d"}
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.648133 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.648189 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.648208 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.648235 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.648253 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:14Z","lastTransitionTime":"2025-10-01T06:17:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.751989 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.752060 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.752083 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.752110 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.752125 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:14Z","lastTransitionTime":"2025-10-01T06:17:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.855193 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.855246 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.855267 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.855295 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.855316 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:14Z","lastTransitionTime":"2025-10-01T06:17:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.958170 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.958227 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.958244 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.958267 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 06:17:14 crc kubenswrapper[4747]: I1001 06:17:14.958284 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:14Z","lastTransitionTime":"2025-10-01T06:17:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.061194 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.061263 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.061289 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.061321 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.061344 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:15Z","lastTransitionTime":"2025-10-01T06:17:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.094173 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs\") pod \"network-metrics-daemon-4g26h\" (UID: \"d04a872f-a6a7-45d3-aa62-be934b7266c2\") " pod="openshift-multus/network-metrics-daemon-4g26h"
Oct 01 06:17:15 crc kubenswrapper[4747]: E1001 06:17:15.094309 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 01 06:17:15 crc kubenswrapper[4747]: E1001 06:17:15.094372 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs podName:d04a872f-a6a7-45d3-aa62-be934b7266c2 nodeName:}" failed. No retries permitted until 2025-10-01 06:17:31.094354212 +0000 UTC m=+52.504011271 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs") pod "network-metrics-daemon-4g26h" (UID: "d04a872f-a6a7-45d3-aa62-be934b7266c2") : object "openshift-multus"/"metrics-daemon-secret" not registered
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.165109 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.165163 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.165174 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.165194 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.165206 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:15Z","lastTransitionTime":"2025-10-01T06:17:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.268364 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.268406 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.268414 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.268428 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.268436 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:15Z","lastTransitionTime":"2025-10-01T06:17:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.275944 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h"
Oct 01 06:17:15 crc kubenswrapper[4747]: E1001 06:17:15.276110 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2"
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.372121 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.372186 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.372207 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.372230 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.372247 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:15Z","lastTransitionTime":"2025-10-01T06:17:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.399556 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.399658 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.399704 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.399729 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.399784 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:15Z","lastTransitionTime":"2025-10-01T06:17:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:15 crc kubenswrapper[4747]: E1001 06:17:15.420905 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.426938 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.426976 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.426988 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.427003 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.427014 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:15Z","lastTransitionTime":"2025-10-01T06:17:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:15 crc kubenswrapper[4747]: E1001 06:17:15.447908 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.453089 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.453155 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.453172 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.453197 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.453214 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:15Z","lastTransitionTime":"2025-10-01T06:17:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:15 crc kubenswrapper[4747]: E1001 06:17:15.476921 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.482063 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.482121 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.482136 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.482154 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.482168 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:15Z","lastTransitionTime":"2025-10-01T06:17:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:15 crc kubenswrapper[4747]: E1001 06:17:15.502680 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.507536 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.507601 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.507622 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.507647 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.507667 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:15Z","lastTransitionTime":"2025-10-01T06:17:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:15 crc kubenswrapper[4747]: E1001 06:17:15.532985 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:15 crc kubenswrapper[4747]: E1001 06:17:15.533138 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.535088 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.535116 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.535126 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.535142 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.535153 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:15Z","lastTransitionTime":"2025-10-01T06:17:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.598869 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" event={"ID":"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b","Type":"ContainerStarted","Data":"14f4855414dc7037b293dbb65a65021952e0445e00345cfe72abd2daf30f14e6"} Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.598934 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" event={"ID":"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b","Type":"ContainerStarted","Data":"00d8d6cd1ad3e44b5d6dba41765d85dad2c93af0cfda4d14693c7723915556c4"} Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.622875 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.638454 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.638500 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.638520 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.638541 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.638559 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:15Z","lastTransitionTime":"2025-10-01T06:17:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.641094 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.687867 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.714228 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.725195 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.739438 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.740804 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.740905 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.741033 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.741059 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.741073 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:15Z","lastTransitionTime":"2025-10-01T06:17:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.754826 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.769103 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.790157 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.821229 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://10185
7f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd23a28f043341b1854fc99197cd116078cbe5d7578602505a8a7f71bda26222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd23a28f043341b1854fc99197cd116078cbe5d7578602505a8a7f71bda26222\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:10Z\\\",\\\"message\\\":\\\": failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z]\\\\nI1001 06:17:10.645254 6159 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/packageserver-service]} name:Service_openshift-operator-lifecycle-manager/packageserver-service_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.153:5443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {5e50827b-d271-442b-b8a7-7f33b2cd6b11}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 06:17:10.645355 6159 lb_config.go:1031] Cluster endpoints for openshift-config-operator/metrics for network=default are: map[]\\\\nI1001 06:17:10.644974 6159 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-p48hw\\\\nI1001 06:17:10.645373 6159 services_controller.go:443] Built service 
op\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.836024 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00d8d6cd1ad3e44b5d6dba41765d85dad2c93af0cfda4d14693c7723915556c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f4855414dc7037b293dbb65a65021952e0445e00345cfe72abd2daf30f14e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lnhpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 
06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.843698 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.843740 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.843768 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.843785 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.843799 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:15Z","lastTransitionTime":"2025-10-01T06:17:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.859468 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"ru
nning\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.885070 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{
\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.920740 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be3
0a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.943176 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.946122 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.946208 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.946231 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.946262 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.946285 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:15Z","lastTransitionTime":"2025-10-01T06:17:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.959187 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:15 crc kubenswrapper[4747]: I1001 06:17:15.976819 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:15Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.049353 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.049407 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.049428 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.049450 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.049466 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:16Z","lastTransitionTime":"2025-10-01T06:17:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.152931 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.152998 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.153009 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.153057 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.153074 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:16Z","lastTransitionTime":"2025-10-01T06:17:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.256095 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.256187 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.256213 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.256245 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.256267 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:16Z","lastTransitionTime":"2025-10-01T06:17:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.276508 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.276599 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.276596 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 01 06:17:16 crc kubenswrapper[4747]: E1001 06:17:16.276710 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 01 06:17:16 crc kubenswrapper[4747]: E1001 06:17:16.276922 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 01 06:17:16 crc kubenswrapper[4747]: E1001 06:17:16.277059 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.360238 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.360293 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.360309 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.360333 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.360351 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:16Z","lastTransitionTime":"2025-10-01T06:17:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.463580 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.463640 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.463658 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.463684 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.463702 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:16Z","lastTransitionTime":"2025-10-01T06:17:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.566277 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.566322 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.566333 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.566349 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.566362 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:16Z","lastTransitionTime":"2025-10-01T06:17:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.669629 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.669679 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.669698 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.669722 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.669743 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:16Z","lastTransitionTime":"2025-10-01T06:17:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.773383 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.773463 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.773483 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.773509 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.773528 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:16Z","lastTransitionTime":"2025-10-01T06:17:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.877391 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.877469 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.877486 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.877517 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.877539 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:16Z","lastTransitionTime":"2025-10-01T06:17:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.980872 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.980926 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.980947 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.980976 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 06:17:16 crc kubenswrapper[4747]: I1001 06:17:16.980997 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:16Z","lastTransitionTime":"2025-10-01T06:17:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.083816 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.083903 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.083927 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.083959 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.083984 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:17Z","lastTransitionTime":"2025-10-01T06:17:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.186798 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.186883 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.186901 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.186934 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.186953 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:17Z","lastTransitionTime":"2025-10-01T06:17:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.276097 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h"
Oct 01 06:17:17 crc kubenswrapper[4747]: E1001 06:17:17.276370 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2"
Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.290324 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.290390 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.290407 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.290432 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.290450 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:17Z","lastTransitionTime":"2025-10-01T06:17:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.393636 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.393709 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.393731 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.393836 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.393870 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:17Z","lastTransitionTime":"2025-10-01T06:17:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.497163 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.497242 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.497261 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.497285 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.497303 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:17Z","lastTransitionTime":"2025-10-01T06:17:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.600018 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.600128 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.600154 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.600183 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.600205 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:17Z","lastTransitionTime":"2025-10-01T06:17:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.702303 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.702367 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.702379 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.702399 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.702411 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:17Z","lastTransitionTime":"2025-10-01T06:17:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.805486 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.805533 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.805551 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.805573 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.805591 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:17Z","lastTransitionTime":"2025-10-01T06:17:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.909087 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.909140 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.909156 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.909175 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:17 crc kubenswrapper[4747]: I1001 06:17:17.909188 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:17Z","lastTransitionTime":"2025-10-01T06:17:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.013113 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.013143 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.013153 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.013168 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.013179 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:18Z","lastTransitionTime":"2025-10-01T06:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.115498 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.115560 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.115579 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.115605 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.115622 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:18Z","lastTransitionTime":"2025-10-01T06:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.218422 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.218485 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.218503 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.218526 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.218546 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:18Z","lastTransitionTime":"2025-10-01T06:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.276507 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.276543 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:18 crc kubenswrapper[4747]: E1001 06:17:18.276662 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.276508 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:18 crc kubenswrapper[4747]: E1001 06:17:18.276839 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:18 crc kubenswrapper[4747]: E1001 06:17:18.277058 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.322101 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.322162 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.322180 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.322206 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.322222 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:18Z","lastTransitionTime":"2025-10-01T06:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.424529 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.424580 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.424595 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.424613 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.424624 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:18Z","lastTransitionTime":"2025-10-01T06:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.527952 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.528011 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.528027 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.528047 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.528062 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:18Z","lastTransitionTime":"2025-10-01T06:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.630626 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.630681 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.630695 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.630717 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.630729 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:18Z","lastTransitionTime":"2025-10-01T06:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.733975 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.734030 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.734041 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.734060 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.734072 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:18Z","lastTransitionTime":"2025-10-01T06:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.836461 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.836566 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.836580 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.836604 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.836619 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:18Z","lastTransitionTime":"2025-10-01T06:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.939411 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.939471 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.939483 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.939501 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:18 crc kubenswrapper[4747]: I1001 06:17:18.939513 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:18Z","lastTransitionTime":"2025-10-01T06:17:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.041595 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.041660 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.041677 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.041699 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.041712 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:19Z","lastTransitionTime":"2025-10-01T06:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.144834 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.144941 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.144962 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.144985 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.145002 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:19Z","lastTransitionTime":"2025-10-01T06:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.249057 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.249111 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.249128 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.249152 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.249170 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:19Z","lastTransitionTime":"2025-10-01T06:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.276556 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:19 crc kubenswrapper[4747]: E1001 06:17:19.276718 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.293607 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00d8d6cd1ad3e44b5d6dba41765d85dad2c93af0cfda4d14693c7723915556c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f4855414dc7037b293dbb65a65021952e0445e00345cfe72abd2daf30f14e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountP
ath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lnhpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.313269 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-iden
tity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.337597 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd23a28f043341b1854fc99197cd116078cbe5d7
578602505a8a7f71bda26222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd23a28f043341b1854fc99197cd116078cbe5d7578602505a8a7f71bda26222\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:10Z\\\",\\\"message\\\":\\\": failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z]\\\\nI1001 06:17:10.645254 6159 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/packageserver-service]} name:Service_openshift-operator-lifecycle-manager/packageserver-service_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.153:5443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {5e50827b-d271-442b-b8a7-7f33b2cd6b11}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 06:17:10.645355 6159 lb_config.go:1031] Cluster endpoints for openshift-config-operator/metrics for network=default are: map[]\\\\nI1001 06:17:10.644974 6159 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-p48hw\\\\nI1001 06:17:10.645373 6159 services_controller.go:443] Built service op\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.351140 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.351503 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.351610 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.351718 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.351887 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:19Z","lastTransitionTime":"2025-10-01T06:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.364931 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.390292 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.409902 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.425170 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.449790 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.454810 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.454859 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.454875 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.454898 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 
06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.454915 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:19Z","lastTransitionTime":"2025-10-01T06:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.469987 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.485726 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.504213 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.517852 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.535603 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.551935 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.557180 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.557231 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.557247 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.557271 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.557288 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:19Z","lastTransitionTime":"2025-10-01T06:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.565302 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.580459 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.593302 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.659735 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.659844 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.659862 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.659887 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.659904 4747 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:19Z","lastTransitionTime":"2025-10-01T06:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.767398 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.767449 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.767466 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.767490 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.767508 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:19Z","lastTransitionTime":"2025-10-01T06:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.870627 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.870693 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.870709 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.870736 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.870769 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:19Z","lastTransitionTime":"2025-10-01T06:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.973627 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.973681 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.973693 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.973711 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:19 crc kubenswrapper[4747]: I1001 06:17:19.973723 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:19Z","lastTransitionTime":"2025-10-01T06:17:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.076668 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.076705 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.076733 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.076774 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.076785 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:20Z","lastTransitionTime":"2025-10-01T06:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.180069 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.180129 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.180145 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.180170 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.180187 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:20Z","lastTransitionTime":"2025-10-01T06:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.276097 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.276136 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.276172 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:20 crc kubenswrapper[4747]: E1001 06:17:20.276287 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:20 crc kubenswrapper[4747]: E1001 06:17:20.276437 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:20 crc kubenswrapper[4747]: E1001 06:17:20.276609 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.283831 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.283897 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.283915 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.283988 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.284007 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:20Z","lastTransitionTime":"2025-10-01T06:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.388214 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.388283 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.388302 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.388332 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.388356 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:20Z","lastTransitionTime":"2025-10-01T06:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.491846 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.492325 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.492501 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.492698 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.492918 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:20Z","lastTransitionTime":"2025-10-01T06:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.595998 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.596495 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.596696 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.596874 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.597034 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:20Z","lastTransitionTime":"2025-10-01T06:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.700887 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.700926 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.700935 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.700949 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.700962 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:20Z","lastTransitionTime":"2025-10-01T06:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.804329 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.804394 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.804412 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.804439 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.804456 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:20Z","lastTransitionTime":"2025-10-01T06:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.906717 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.906832 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.906849 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.906874 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:20 crc kubenswrapper[4747]: I1001 06:17:20.906891 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:20Z","lastTransitionTime":"2025-10-01T06:17:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.009181 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.009248 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.009269 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.009298 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.009320 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:21Z","lastTransitionTime":"2025-10-01T06:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.112867 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.112932 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.112948 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.112971 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.112989 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:21Z","lastTransitionTime":"2025-10-01T06:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.216571 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.216654 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.216671 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.216702 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.216720 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:21Z","lastTransitionTime":"2025-10-01T06:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.276337 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:21 crc kubenswrapper[4747]: E1001 06:17:21.276541 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.319270 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.319326 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.319344 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.319369 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.319392 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:21Z","lastTransitionTime":"2025-10-01T06:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.423414 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.423650 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.423666 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.423689 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.423708 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:21Z","lastTransitionTime":"2025-10-01T06:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.527270 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.527371 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.527394 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.527424 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.527443 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:21Z","lastTransitionTime":"2025-10-01T06:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.630289 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.630352 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.630373 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.630397 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.630415 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:21Z","lastTransitionTime":"2025-10-01T06:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.732933 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.733008 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.733035 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.733065 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.733086 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:21Z","lastTransitionTime":"2025-10-01T06:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.835822 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.835858 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.835867 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.835881 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.835890 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:21Z","lastTransitionTime":"2025-10-01T06:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.938695 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.938800 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.938841 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.938872 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:21 crc kubenswrapper[4747]: I1001 06:17:21.938890 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:21Z","lastTransitionTime":"2025-10-01T06:17:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.041500 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.041565 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.041582 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.041608 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.041631 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:22Z","lastTransitionTime":"2025-10-01T06:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.148526 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.149345 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.149398 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.149431 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.149455 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:22Z","lastTransitionTime":"2025-10-01T06:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.253149 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.253219 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.253256 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.253288 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.253310 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:22Z","lastTransitionTime":"2025-10-01T06:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.275963 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.275990 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:22 crc kubenswrapper[4747]: E1001 06:17:22.276153 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.276250 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:22 crc kubenswrapper[4747]: E1001 06:17:22.276415 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:22 crc kubenswrapper[4747]: E1001 06:17:22.276660 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.355714 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.355820 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.355841 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.355866 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.355884 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:22Z","lastTransitionTime":"2025-10-01T06:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.458855 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.458907 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.458922 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.458945 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.458962 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:22Z","lastTransitionTime":"2025-10-01T06:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.561900 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.562148 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.562287 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.562425 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.562542 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:22Z","lastTransitionTime":"2025-10-01T06:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.665919 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.666357 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.666561 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.666798 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.666963 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:22Z","lastTransitionTime":"2025-10-01T06:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.769932 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.770310 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.770524 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.770655 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.770930 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:22Z","lastTransitionTime":"2025-10-01T06:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.874326 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.874711 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.874944 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.875144 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.875371 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:22Z","lastTransitionTime":"2025-10-01T06:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.979249 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.979353 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.979378 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.979417 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:22 crc kubenswrapper[4747]: I1001 06:17:22.979446 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:22Z","lastTransitionTime":"2025-10-01T06:17:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.083639 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.083702 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.083720 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.083746 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.083797 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:23Z","lastTransitionTime":"2025-10-01T06:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.187378 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.187420 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.187430 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.187446 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.187458 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:23Z","lastTransitionTime":"2025-10-01T06:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.276285 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:23 crc kubenswrapper[4747]: E1001 06:17:23.276516 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.289439 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.289487 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.289503 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.289525 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.289542 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:23Z","lastTransitionTime":"2025-10-01T06:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.392125 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.392436 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.392519 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.392609 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.392771 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:23Z","lastTransitionTime":"2025-10-01T06:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.495406 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.495462 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.495478 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.495501 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.495518 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:23Z","lastTransitionTime":"2025-10-01T06:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.598349 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.598600 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.598724 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.598898 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.599036 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:23Z","lastTransitionTime":"2025-10-01T06:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.703274 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.703703 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.703869 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.704010 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.704128 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:23Z","lastTransitionTime":"2025-10-01T06:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.807551 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.807614 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.807639 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.807669 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.807689 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:23Z","lastTransitionTime":"2025-10-01T06:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.910960 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.911054 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.911074 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.911101 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:23 crc kubenswrapper[4747]: I1001 06:17:23.911119 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:23Z","lastTransitionTime":"2025-10-01T06:17:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.013870 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.014553 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.014597 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.014626 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.014645 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:24Z","lastTransitionTime":"2025-10-01T06:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.117591 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.117649 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.117665 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.117688 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.117706 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:24Z","lastTransitionTime":"2025-10-01T06:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.221387 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.221438 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.221456 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.221483 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.221500 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:24Z","lastTransitionTime":"2025-10-01T06:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.276181 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.276301 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.276181 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:24 crc kubenswrapper[4747]: E1001 06:17:24.276400 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:24 crc kubenswrapper[4747]: E1001 06:17:24.276564 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:24 crc kubenswrapper[4747]: E1001 06:17:24.276657 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.324832 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.324902 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.324924 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.324954 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.324976 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:24Z","lastTransitionTime":"2025-10-01T06:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.428324 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.428408 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.428432 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.428469 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.428498 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:24Z","lastTransitionTime":"2025-10-01T06:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.524733 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.526082 4747 scope.go:117] "RemoveContainer" containerID="cd23a28f043341b1854fc99197cd116078cbe5d7578602505a8a7f71bda26222" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.530675 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.530725 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.530745 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.530819 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.530839 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:24Z","lastTransitionTime":"2025-10-01T06:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.633073 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.633133 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.633151 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.633174 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.633190 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:24Z","lastTransitionTime":"2025-10-01T06:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.736416 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.736461 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.736473 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.736489 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.736503 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:24Z","lastTransitionTime":"2025-10-01T06:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.840066 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.840115 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.840124 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.840140 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.840149 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:24Z","lastTransitionTime":"2025-10-01T06:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.942301 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.942333 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.942342 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.942355 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:24 crc kubenswrapper[4747]: I1001 06:17:24.942365 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:24Z","lastTransitionTime":"2025-10-01T06:17:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.045231 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.045298 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.045325 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.045358 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.045382 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:25Z","lastTransitionTime":"2025-10-01T06:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.148180 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.148240 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.148258 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.148282 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.148347 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:25Z","lastTransitionTime":"2025-10-01T06:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.252090 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.252149 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.252169 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.252192 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.252210 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:25Z","lastTransitionTime":"2025-10-01T06:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.275934 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:25 crc kubenswrapper[4747]: E1001 06:17:25.276138 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.355178 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.355243 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.355263 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.355290 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.355308 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:25Z","lastTransitionTime":"2025-10-01T06:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.457852 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.457893 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.457906 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.457925 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.457938 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:25Z","lastTransitionTime":"2025-10-01T06:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.560878 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.560921 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.560983 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.561005 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.561022 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:25Z","lastTransitionTime":"2025-10-01T06:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.636344 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p48hw_38849139-d385-42a4-adab-687566065973/ovnkube-controller/2.log" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.637325 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p48hw_38849139-d385-42a4-adab-687566065973/ovnkube-controller/1.log" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.641074 4747 generic.go:334] "Generic (PLEG): container finished" podID="38849139-d385-42a4-adab-687566065973" containerID="5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4" exitCode=1 Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.641128 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerDied","Data":"5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4"} Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.641185 4747 scope.go:117] "RemoveContainer" containerID="cd23a28f043341b1854fc99197cd116078cbe5d7578602505a8a7f71bda26222" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.643195 4747 scope.go:117] "RemoveContainer" containerID="5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4" Oct 01 06:17:25 crc kubenswrapper[4747]: E1001 06:17:25.643570 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" podUID="38849139-d385-42a4-adab-687566065973" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.663974 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.664043 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.664059 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.664083 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.664100 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:25Z","lastTransitionTime":"2025-10-01T06:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.671902 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:25Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.693325 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd23a28f043341b1854fc99197cd116078cbe5d7578602505a8a7f71bda26222\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:10Z\\\",\\\"message\\\":\\\": failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z]\\\\nI1001 06:17:10.645254 6159 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/packageserver-service]} name:Service_openshift-operator-lifecycle-manager/packageserver-service_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.153:5443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {5e50827b-d271-442b-b8a7-7f33b2cd6b11}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 06:17:10.645355 6159 lb_config.go:1031] Cluster endpoints for openshift-config-operator/metrics for network=default are: map[]\\\\nI1001 06:17:10.644974 6159 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-p48hw\\\\nI1001 06:17:10.645373 6159 services_controller.go:443] Built service op\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\" 06:17:25.451915 6360 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 
06:17:25.451938 6360 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 06:17:25.451981 6360 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 06:17:25.451990 6360 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1001 06:17:25.452000 6360 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1001 06:17:25.452033 6360 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 06:17:25.452069 6360 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1001 06:17:25.452095 6360 handler.go:208] Removed *v1.Node event handler 7\\\\nI1001 06:17:25.452107 6360 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 06:17:25.452038 6360 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1001 06:17:25.452214 6360 factory.go:656] Stopping watch factory\\\\nI1001 06:17:25.452219 6360 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:25.452335 6360 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:25.452376 6360 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cd
d47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:25Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.712042 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00d8d6cd1ad3e44b5d6dba41765d85dad2c93af0cfda4d14693c7723915556c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f4855414dc7037b293dbb65a65021952e0445e00345cfe72abd2daf30f14e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lnhpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:25Z is after 2025-08-24T17:21:41Z" Oct 01 
06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.731094 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:25Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.737592 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.737847 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.738013 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.738137 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.738303 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:25Z","lastTransitionTime":"2025-10-01T06:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.745889 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:25Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:25 crc kubenswrapper[4747]: E1001 06:17:25.755435 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:25Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.759456 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.759699 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.759864 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.760044 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.760165 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:25Z","lastTransitionTime":"2025-10-01T06:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.767499 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:25Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:25 crc kubenswrapper[4747]: E1001 06:17:25.776700 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:25Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.781916 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.781974 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.781991 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.782016 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.782035 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:25Z","lastTransitionTime":"2025-10-01T06:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:25 crc kubenswrapper[4747]: E1001 06:17:25.799066 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:25Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.806917 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0
ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:25Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.809618 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.809693 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.809714 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.809738 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.809790 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:25Z","lastTransitionTime":"2025-10-01T06:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.826004 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:25Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:25 crc kubenswrapper[4747]: E1001 06:17:25.830420 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"4
86dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:25Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.835105 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.835134 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.835143 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.835156 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.835165 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:25Z","lastTransitionTime":"2025-10-01T06:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.851623 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]
,\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"
finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9810067
4616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:25Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:25 crc kubenswrapper[4747]: E1001 06:17:25.855447 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:25Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:25 crc kubenswrapper[4747]: E1001 06:17:25.855673 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.857922 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.857968 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.857984 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.858007 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.858027 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:25Z","lastTransitionTime":"2025-10-01T06:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.868386 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:25Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.891247 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\
":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:25Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.913538 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:25Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.934441 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:25Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.953435 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:25Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.961049 4747 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.961106 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.961122 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.961148 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.961166 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:25Z","lastTransitionTime":"2025-10-01T06:17:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.973572 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:25Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:25 crc kubenswrapper[4747]: I1001 06:17:25.991254 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:25Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.013386 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:26Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.064105 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.064161 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.064180 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.064203 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.064222 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:26Z","lastTransitionTime":"2025-10-01T06:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.167540 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.167604 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.167622 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.167648 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.167666 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:26Z","lastTransitionTime":"2025-10-01T06:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.271138 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.271542 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.271854 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.272067 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.272228 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:26Z","lastTransitionTime":"2025-10-01T06:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.276387 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.276470 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:26 crc kubenswrapper[4747]: E1001 06:17:26.276558 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:26 crc kubenswrapper[4747]: E1001 06:17:26.276633 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.276975 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:26 crc kubenswrapper[4747]: E1001 06:17:26.277355 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.376175 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.376240 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.376259 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.376282 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.376300 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:26Z","lastTransitionTime":"2025-10-01T06:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.479554 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.479609 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.479626 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.479647 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.479667 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:26Z","lastTransitionTime":"2025-10-01T06:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.583166 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.583218 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.583235 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.583256 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.583272 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:26Z","lastTransitionTime":"2025-10-01T06:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.647909 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p48hw_38849139-d385-42a4-adab-687566065973/ovnkube-controller/2.log" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.685741 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.685902 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.685934 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.685962 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.685983 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:26Z","lastTransitionTime":"2025-10-01T06:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.788287 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.788336 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.788352 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.788370 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.788384 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:26Z","lastTransitionTime":"2025-10-01T06:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.891170 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.891216 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.891233 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.891255 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.891271 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:26Z","lastTransitionTime":"2025-10-01T06:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.994635 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.994683 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.994699 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.994723 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:26 crc kubenswrapper[4747]: I1001 06:17:26.994740 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:26Z","lastTransitionTime":"2025-10-01T06:17:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.097150 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.097214 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.097231 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.097256 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.097274 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:27Z","lastTransitionTime":"2025-10-01T06:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.152963 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.163010 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.177970 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:27Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.200658 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.200725 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.200743 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.200795 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 
06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.200814 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:27Z","lastTransitionTime":"2025-10-01T06:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.208193 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-
dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e491
17b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:27Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.230340 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:27Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.250276 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:27Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.269714 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:27Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.276801 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:27 crc kubenswrapper[4747]: E1001 06:17:27.276970 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.292913 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-re
sources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:27Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.303921 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.303971 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.303987 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.304010 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.304026 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:27Z","lastTransitionTime":"2025-10-01T06:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.311538 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:27Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.332258 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:27Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.351793 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:27Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.375503 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:27Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.392324 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:27Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.406672 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.407157 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.407302 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.407443 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.407596 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:27Z","lastTransitionTime":"2025-10-01T06:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.412317 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:27Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.426787 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:27Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.444449 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:27Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.463149 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:27Z is after 
2025-08-24T17:21:41Z" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.493288 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2
1cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd23a28f043341b1854fc99197cd116078cbe5d7578602505a8a7f71bda26222\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:10Z\\\",\\\"message\\\":\\\": failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z]\\\\nI1001 06:17:10.645254 6159 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/packageserver-service]} name:Service_openshift-operator-lifecycle-manager/packageserver-service_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.153:5443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {5e50827b-d271-442b-b8a7-7f33b2cd6b11}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 06:17:10.645355 6159 lb_config.go:1031] Cluster endpoints for openshift-config-operator/metrics for network=default are: map[]\\\\nI1001 06:17:10.644974 6159 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-p48hw\\\\nI1001 06:17:10.645373 6159 services_controller.go:443] Built service 
op\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\" 06:17:25.451915 6360 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 06:17:25.451938 6360 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 06:17:25.451981 6360 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 06:17:25.451990 6360 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1001 06:17:25.452000 6360 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1001 06:17:25.452033 6360 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 06:17:25.452069 6360 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1001 06:17:25.452095 6360 handler.go:208] Removed *v1.Node event handler 7\\\\nI1001 06:17:25.452107 6360 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 06:17:25.452038 6360 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1001 06:17:25.452214 6360 factory.go:656] Stopping watch factory\\\\nI1001 06:17:25.452219 6360 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:25.452335 6360 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:25.452376 6360 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:27Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.510996 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.511060 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.511079 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.511104 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.511121 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:27Z","lastTransitionTime":"2025-10-01T06:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.511520 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00d8d6cd1ad3e44b5d6dba41765d85dad2c93af0cfda4d14693c7723915556c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f4855414dc7037b293dbb65a65021952e0445e00345cfe72abd2daf30f14e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lnhpd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:27Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.614345 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.614393 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.614408 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.614425 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.614437 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:27Z","lastTransitionTime":"2025-10-01T06:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.717482 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.717926 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.718093 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.718234 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.718354 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:27Z","lastTransitionTime":"2025-10-01T06:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.822124 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.822224 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.822243 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.822271 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.822289 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:27Z","lastTransitionTime":"2025-10-01T06:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.925914 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.925982 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.926004 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.926032 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:27 crc kubenswrapper[4747]: I1001 06:17:27.926053 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:27Z","lastTransitionTime":"2025-10-01T06:17:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.028854 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.028910 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.028926 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.028949 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.028965 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:28Z","lastTransitionTime":"2025-10-01T06:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.132462 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.132565 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.132588 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.132619 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.132643 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:28Z","lastTransitionTime":"2025-10-01T06:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.235435 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.235483 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.235502 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.235526 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.235544 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:28Z","lastTransitionTime":"2025-10-01T06:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.276446 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.276493 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.276634 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:28 crc kubenswrapper[4747]: E1001 06:17:28.276886 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:28 crc kubenswrapper[4747]: E1001 06:17:28.277007 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:28 crc kubenswrapper[4747]: E1001 06:17:28.277222 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.339141 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.339196 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.339214 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.339239 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.339256 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:28Z","lastTransitionTime":"2025-10-01T06:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.441926 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.441985 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.442002 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.442031 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.442049 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:28Z","lastTransitionTime":"2025-10-01T06:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.545099 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.545154 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.545170 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.545192 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.545208 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:28Z","lastTransitionTime":"2025-10-01T06:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.648177 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.648244 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.648261 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.648290 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.648307 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:28Z","lastTransitionTime":"2025-10-01T06:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.750688 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.750720 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.750731 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.750765 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.750787 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:28Z","lastTransitionTime":"2025-10-01T06:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.853077 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.853122 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.853133 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.853150 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.853161 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:28Z","lastTransitionTime":"2025-10-01T06:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.955888 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.955923 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.955931 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.955946 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:28 crc kubenswrapper[4747]: I1001 06:17:28.955954 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:28Z","lastTransitionTime":"2025-10-01T06:17:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.058093 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.058158 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.058181 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.058207 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.058224 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:29Z","lastTransitionTime":"2025-10-01T06:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.160887 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.160950 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.160967 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.161045 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.161066 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:29Z","lastTransitionTime":"2025-10-01T06:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.265140 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.265190 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.265206 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.265228 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.265246 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:29Z","lastTransitionTime":"2025-10-01T06:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.277138 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:29 crc kubenswrapper[4747]: E1001 06:17:29.277528 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.296927 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-re
sources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:29Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.323948 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:29Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.342799 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:29Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.357721 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:29Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.368253 4747 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.368328 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.368347 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.368373 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.368393 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:29Z","lastTransitionTime":"2025-10-01T06:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.381487 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:29Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.400120 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-01T06:17:29Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.417135 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6dbf0278-afc0-4d69-9c4d-4430d499077e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d005a72a4508464e46cf86c9c347b040b94e66243125197b1ecfc851e9775c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2379c4c720c1c540235a19746449bd80db698055cee11b03937a739fdbcf7a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://429399ca37c5afa42bbe8695a8fa5de760ec79ec7584b764f0b886acce46e770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:29Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.435319 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:29Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.451807 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:29Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.471149 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.471203 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.471224 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.471256 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.471278 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:29Z","lastTransitionTime":"2025-10-01T06:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.472323 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"moun
tPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:29Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.484876 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:29Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.509419 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cd23a28f043341b1854fc99197cd116078cbe5d7578602505a8a7f71bda26222\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:10Z\\\",\\\"message\\\":\\\": failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:10Z is after 2025-08-24T17:21:41Z]\\\\nI1001 06:17:10.645254 6159 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/packageserver-service]} name:Service_openshift-operator-lifecycle-manager/packageserver-service_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.153:5443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {5e50827b-d271-442b-b8a7-7f33b2cd6b11}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 06:17:10.645355 6159 lb_config.go:1031] Cluster endpoints for openshift-config-operator/metrics for network=default are: map[]\\\\nI1001 06:17:10.644974 6159 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-p48hw\\\\nI1001 06:17:10.645373 6159 services_controller.go:443] Built service op\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\" 06:17:25.451915 6360 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 
06:17:25.451938 6360 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 06:17:25.451981 6360 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 06:17:25.451990 6360 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1001 06:17:25.452000 6360 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1001 06:17:25.452033 6360 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 06:17:25.452069 6360 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1001 06:17:25.452095 6360 handler.go:208] Removed *v1.Node event handler 7\\\\nI1001 06:17:25.452107 6360 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 06:17:25.452038 6360 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1001 06:17:25.452214 6360 factory.go:656] Stopping watch factory\\\\nI1001 06:17:25.452219 6360 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:25.452335 6360 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:25.452376 6360 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cd
d47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:29Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.522597 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00d8d6cd1ad3e44b5d6dba41765d85dad2c93af0cfda4d14693c7723915556c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f4855414dc7037b293dbb65a65021952e0445e00345cfe72abd2daf30f14e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lnhpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:29Z is after 2025-08-24T17:21:41Z" Oct 01 
06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.537991 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:29Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.560196 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0
ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:29Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.573602 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.573669 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.573692 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.573722 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.573745 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:29Z","lastTransitionTime":"2025-10-01T06:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.581398 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:29Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.599401 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:29Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.610507 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:29Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.676473 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.676531 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.676549 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.676574 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.676594 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:29Z","lastTransitionTime":"2025-10-01T06:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.779017 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.779090 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.779111 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.779140 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.779162 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:29Z","lastTransitionTime":"2025-10-01T06:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.881648 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.881702 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.881721 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.881744 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.881790 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:29Z","lastTransitionTime":"2025-10-01T06:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.985610 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.985667 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.985686 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.985709 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:29 crc kubenswrapper[4747]: I1001 06:17:29.985726 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:29Z","lastTransitionTime":"2025-10-01T06:17:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.090787 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.091158 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.091175 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.091200 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.091218 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:30Z","lastTransitionTime":"2025-10-01T06:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.194246 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.194362 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.194382 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.194449 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.194468 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:30Z","lastTransitionTime":"2025-10-01T06:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.276185 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.276263 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.276290 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:30 crc kubenswrapper[4747]: E1001 06:17:30.276380 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:30 crc kubenswrapper[4747]: E1001 06:17:30.276470 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:30 crc kubenswrapper[4747]: E1001 06:17:30.276636 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.297075 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.297124 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.297141 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.297163 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.297181 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:30Z","lastTransitionTime":"2025-10-01T06:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.400466 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.400520 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.400537 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.400563 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.400580 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:30Z","lastTransitionTime":"2025-10-01T06:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.466401 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:17:30 crc kubenswrapper[4747]: E1001 06:17:30.466603 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:18:02.466559628 +0000 UTC m=+83.876216717 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.503082 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.503140 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.503188 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.503213 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.503230 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:30Z","lastTransitionTime":"2025-10-01T06:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.567852 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.567927 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.567977 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.568017 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:30 crc kubenswrapper[4747]: E1001 06:17:30.568041 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 06:17:30 crc kubenswrapper[4747]: E1001 06:17:30.568083 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 06:17:30 crc kubenswrapper[4747]: E1001 06:17:30.568141 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 06:18:02.568121169 +0000 UTC m=+83.977778228 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 06:17:30 crc kubenswrapper[4747]: E1001 06:17:30.568173 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 06:18:02.568151079 +0000 UTC m=+83.977808168 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 06:17:30 crc kubenswrapper[4747]: E1001 06:17:30.568189 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 06:17:30 crc kubenswrapper[4747]: E1001 06:17:30.568216 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 06:17:30 crc kubenswrapper[4747]: E1001 06:17:30.568236 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:17:30 crc kubenswrapper[4747]: E1001 06:17:30.568268 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 06:17:30 crc kubenswrapper[4747]: E1001 06:17:30.568308 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 06:17:30 crc kubenswrapper[4747]: E1001 06:17:30.568317 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 06:18:02.568295903 +0000 UTC m=+83.977952982 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:17:30 crc kubenswrapper[4747]: E1001 06:17:30.568329 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:17:30 crc kubenswrapper[4747]: E1001 06:17:30.568405 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 06:18:02.568380756 +0000 UTC m=+83.978037845 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.606358 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.606737 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.606957 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.607135 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.607311 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:30Z","lastTransitionTime":"2025-10-01T06:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.710279 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.710339 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.710356 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.710380 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.710395 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:30Z","lastTransitionTime":"2025-10-01T06:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.813213 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.813267 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.813278 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.813297 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.813327 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:30Z","lastTransitionTime":"2025-10-01T06:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.916548 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.916598 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.916613 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.916631 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:30 crc kubenswrapper[4747]: I1001 06:17:30.916642 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:30Z","lastTransitionTime":"2025-10-01T06:17:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.020884 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.020947 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.020963 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.020991 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.021010 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:31Z","lastTransitionTime":"2025-10-01T06:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.123536 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.123577 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.123588 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.123604 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.123615 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:31Z","lastTransitionTime":"2025-10-01T06:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.173206 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs\") pod \"network-metrics-daemon-4g26h\" (UID: \"d04a872f-a6a7-45d3-aa62-be934b7266c2\") " pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:31 crc kubenswrapper[4747]: E1001 06:17:31.173400 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 06:17:31 crc kubenswrapper[4747]: E1001 06:17:31.173519 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs podName:d04a872f-a6a7-45d3-aa62-be934b7266c2 nodeName:}" failed. No retries permitted until 2025-10-01 06:18:03.173490369 +0000 UTC m=+84.583147448 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs") pod "network-metrics-daemon-4g26h" (UID: "d04a872f-a6a7-45d3-aa62-be934b7266c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.227234 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.227292 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.227309 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.227330 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.227347 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:31Z","lastTransitionTime":"2025-10-01T06:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.276371 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:31 crc kubenswrapper[4747]: E1001 06:17:31.276553 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.330875 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.330911 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.330922 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.330941 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.330956 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:31Z","lastTransitionTime":"2025-10-01T06:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.433244 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.433296 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.433307 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.433323 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.433334 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:31Z","lastTransitionTime":"2025-10-01T06:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.536847 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.536905 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.536922 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.536945 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.536962 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:31Z","lastTransitionTime":"2025-10-01T06:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.640198 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.640268 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.640290 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.640322 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.640344 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:31Z","lastTransitionTime":"2025-10-01T06:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.743676 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.743743 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.743805 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.743836 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.743866 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:31Z","lastTransitionTime":"2025-10-01T06:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.847325 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.847395 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.847415 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.847442 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.847465 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:31Z","lastTransitionTime":"2025-10-01T06:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.950254 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.950300 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.950314 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.950334 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:31 crc kubenswrapper[4747]: I1001 06:17:31.950348 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:31Z","lastTransitionTime":"2025-10-01T06:17:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.052257 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.052311 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.052338 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.052360 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.052377 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:32Z","lastTransitionTime":"2025-10-01T06:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.155404 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.155722 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.155921 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.156075 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.156203 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:32Z","lastTransitionTime":"2025-10-01T06:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.260145 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.260225 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.260238 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.260254 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.260267 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:32Z","lastTransitionTime":"2025-10-01T06:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.275694 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.275713 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:32 crc kubenswrapper[4747]: E1001 06:17:32.275831 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.275989 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:32 crc kubenswrapper[4747]: E1001 06:17:32.276036 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:32 crc kubenswrapper[4747]: E1001 06:17:32.276186 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.363034 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.363086 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.363102 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.363123 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.363137 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:32Z","lastTransitionTime":"2025-10-01T06:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.466403 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.466812 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.467198 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.467551 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.467925 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:32Z","lastTransitionTime":"2025-10-01T06:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.571386 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.571452 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.571469 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.571495 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.571521 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:32Z","lastTransitionTime":"2025-10-01T06:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.674303 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.674709 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.674939 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.675171 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.675346 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:32Z","lastTransitionTime":"2025-10-01T06:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.778063 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.778133 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.778149 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.778169 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.778209 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:32Z","lastTransitionTime":"2025-10-01T06:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.880795 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.880827 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.880836 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.880848 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.880858 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:32Z","lastTransitionTime":"2025-10-01T06:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.983189 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.983612 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.983918 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.984175 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:32 crc kubenswrapper[4747]: I1001 06:17:32.984402 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:32Z","lastTransitionTime":"2025-10-01T06:17:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.087956 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.088011 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.088029 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.088053 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.088071 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:33Z","lastTransitionTime":"2025-10-01T06:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.191365 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.191419 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.191431 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.191450 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.191466 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:33Z","lastTransitionTime":"2025-10-01T06:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.276245 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:33 crc kubenswrapper[4747]: E1001 06:17:33.276429 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.293914 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.293970 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.293984 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.294004 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.294018 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:33Z","lastTransitionTime":"2025-10-01T06:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.396444 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.396505 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.396518 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.396537 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.396551 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:33Z","lastTransitionTime":"2025-10-01T06:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.499811 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.499864 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.499880 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.499905 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.499922 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:33Z","lastTransitionTime":"2025-10-01T06:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.602670 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.602723 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.602740 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.602790 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.602808 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:33Z","lastTransitionTime":"2025-10-01T06:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.705572 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.705635 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.705659 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.705693 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.705714 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:33Z","lastTransitionTime":"2025-10-01T06:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.809156 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.809230 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.809250 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.809281 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.809304 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:33Z","lastTransitionTime":"2025-10-01T06:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.911608 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.911689 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.911714 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.911744 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:33 crc kubenswrapper[4747]: I1001 06:17:33.911801 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:33Z","lastTransitionTime":"2025-10-01T06:17:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.014617 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.014672 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.014685 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.014702 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.014714 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:34Z","lastTransitionTime":"2025-10-01T06:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.118003 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.118055 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.118067 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.118085 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.118098 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:34Z","lastTransitionTime":"2025-10-01T06:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.221372 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.221452 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.221476 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.221497 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.221514 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:34Z","lastTransitionTime":"2025-10-01T06:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.276341 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:34 crc kubenswrapper[4747]: E1001 06:17:34.276588 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.276590 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.276677 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:34 crc kubenswrapper[4747]: E1001 06:17:34.276746 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:34 crc kubenswrapper[4747]: E1001 06:17:34.276936 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.323280 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.323330 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.323338 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.323353 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.323364 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:34Z","lastTransitionTime":"2025-10-01T06:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.426811 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.426875 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.426895 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.426922 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.426936 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:34Z","lastTransitionTime":"2025-10-01T06:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.530257 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.530305 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.530315 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.530330 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.530341 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:34Z","lastTransitionTime":"2025-10-01T06:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.640460 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.640566 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.640596 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.640649 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.640678 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:34Z","lastTransitionTime":"2025-10-01T06:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.744229 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.744586 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.744796 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.744966 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.745153 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:34Z","lastTransitionTime":"2025-10-01T06:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.848565 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.848696 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.848720 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.848745 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.848819 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:34Z","lastTransitionTime":"2025-10-01T06:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.951817 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.951871 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.951884 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.951904 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:34 crc kubenswrapper[4747]: I1001 06:17:34.951941 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:34Z","lastTransitionTime":"2025-10-01T06:17:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.054826 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.055162 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.055291 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.055431 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.055558 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:35Z","lastTransitionTime":"2025-10-01T06:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.157447 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.157503 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.157520 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.157542 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.157558 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:35Z","lastTransitionTime":"2025-10-01T06:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.261166 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.261807 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.262010 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.262211 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.262406 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:35Z","lastTransitionTime":"2025-10-01T06:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.276809 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:35 crc kubenswrapper[4747]: E1001 06:17:35.277602 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.365859 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.365941 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.365965 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.365995 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.366020 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:35Z","lastTransitionTime":"2025-10-01T06:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.468836 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.468925 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.468950 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.468980 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.469003 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:35Z","lastTransitionTime":"2025-10-01T06:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.572164 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.572235 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.572251 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.572277 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.572294 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:35Z","lastTransitionTime":"2025-10-01T06:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.674919 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.674963 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.675001 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.675017 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.675028 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:35Z","lastTransitionTime":"2025-10-01T06:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.777711 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.777829 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.777856 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.777887 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.777909 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:35Z","lastTransitionTime":"2025-10-01T06:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.881854 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.881918 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.881943 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.881969 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.881987 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:35Z","lastTransitionTime":"2025-10-01T06:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.951644 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.951715 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.951737 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.951796 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.951820 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:35Z","lastTransitionTime":"2025-10-01T06:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:35 crc kubenswrapper[4747]: E1001 06:17:35.972553 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:35Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.977587 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.977635 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.977647 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.977664 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.977677 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:35Z","lastTransitionTime":"2025-10-01T06:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:35 crc kubenswrapper[4747]: E1001 06:17:35.995427 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:35Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.999462 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.999495 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.999506 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.999532 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:35 crc kubenswrapper[4747]: I1001 06:17:35.999549 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:35Z","lastTransitionTime":"2025-10-01T06:17:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:36 crc kubenswrapper[4747]: E1001 06:17:36.019681 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:36Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.025693 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.025787 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.025812 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.025835 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.025852 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:36Z","lastTransitionTime":"2025-10-01T06:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:36 crc kubenswrapper[4747]: E1001 06:17:36.048163 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:36Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.059252 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.059316 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.059337 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.059366 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.059388 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:36Z","lastTransitionTime":"2025-10-01T06:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:36 crc kubenswrapper[4747]: E1001 06:17:36.079990 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:36Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:36 crc kubenswrapper[4747]: E1001 06:17:36.080221 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.082360 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.082410 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.082427 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.082450 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.082469 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:36Z","lastTransitionTime":"2025-10-01T06:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.185985 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.186031 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.186041 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.186056 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.186066 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:36Z","lastTransitionTime":"2025-10-01T06:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.276635 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.276670 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:36 crc kubenswrapper[4747]: E1001 06:17:36.276835 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.276854 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:36 crc kubenswrapper[4747]: E1001 06:17:36.276992 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:36 crc kubenswrapper[4747]: E1001 06:17:36.277078 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.289373 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.289564 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.289584 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.289607 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.289626 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:36Z","lastTransitionTime":"2025-10-01T06:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.391905 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.391953 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.391970 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.391990 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.392005 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:36Z","lastTransitionTime":"2025-10-01T06:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.494813 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.495305 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.495523 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.495816 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.496035 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:36Z","lastTransitionTime":"2025-10-01T06:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.600664 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.600741 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.600806 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.600840 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.600863 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:36Z","lastTransitionTime":"2025-10-01T06:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.703650 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.703702 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.703717 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.703771 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.703790 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:36Z","lastTransitionTime":"2025-10-01T06:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.807102 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.807155 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.807177 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.807205 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.807223 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:36Z","lastTransitionTime":"2025-10-01T06:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.909352 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.909396 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.909406 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.909424 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:36 crc kubenswrapper[4747]: I1001 06:17:36.909437 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:36Z","lastTransitionTime":"2025-10-01T06:17:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.012440 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.012515 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.012540 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.012573 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.012598 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:37Z","lastTransitionTime":"2025-10-01T06:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.115968 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.116044 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.116063 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.116086 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.116103 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:37Z","lastTransitionTime":"2025-10-01T06:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.218945 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.219061 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.219082 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.219107 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.219123 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:37Z","lastTransitionTime":"2025-10-01T06:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.276544 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:37 crc kubenswrapper[4747]: E1001 06:17:37.276990 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.322185 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.322225 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.322236 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.322270 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.322281 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:37Z","lastTransitionTime":"2025-10-01T06:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.425521 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.425614 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.425637 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.425664 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.425686 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:37Z","lastTransitionTime":"2025-10-01T06:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.528206 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.528321 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.528339 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.528364 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.528386 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:37Z","lastTransitionTime":"2025-10-01T06:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.631833 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.631907 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.631940 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.631968 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.631989 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:37Z","lastTransitionTime":"2025-10-01T06:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.734946 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.735022 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.735041 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.735063 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.735079 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:37Z","lastTransitionTime":"2025-10-01T06:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.837785 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.837836 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.837853 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.837877 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.837923 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:37Z","lastTransitionTime":"2025-10-01T06:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.940507 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.940560 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.940569 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.940584 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:37 crc kubenswrapper[4747]: I1001 06:17:37.940593 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:37Z","lastTransitionTime":"2025-10-01T06:17:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.043420 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.043486 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.043507 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.043536 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.043560 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:38Z","lastTransitionTime":"2025-10-01T06:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.146402 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.146473 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.146495 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.146523 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.146544 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:38Z","lastTransitionTime":"2025-10-01T06:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.249906 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.249966 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.249983 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.250009 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.250027 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:38Z","lastTransitionTime":"2025-10-01T06:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.276616 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.276626 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.276814 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:38 crc kubenswrapper[4747]: E1001 06:17:38.276994 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:38 crc kubenswrapper[4747]: E1001 06:17:38.277320 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:38 crc kubenswrapper[4747]: E1001 06:17:38.278048 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.278365 4747 scope.go:117] "RemoveContainer" containerID="5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4" Oct 01 06:17:38 crc kubenswrapper[4747]: E1001 06:17:38.278633 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" podUID="38849139-d385-42a4-adab-687566065973" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.300253 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:38Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.333310 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\" 06:17:25.451915 6360 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 06:17:25.451938 6360 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 06:17:25.451981 6360 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 06:17:25.451990 6360 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1001 06:17:25.452000 6360 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1001 06:17:25.452033 6360 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 06:17:25.452069 6360 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1001 06:17:25.452095 6360 handler.go:208] Removed *v1.Node event handler 7\\\\nI1001 06:17:25.452107 6360 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 06:17:25.452038 6360 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1001 06:17:25.452214 6360 factory.go:656] Stopping watch factory\\\\nI1001 06:17:25.452219 6360 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:25.452335 6360 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:25.452376 6360 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:38Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.351544 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00d8d6cd1ad3e44b5d6dba41765d85dad2c93af0cfda4d14693c7723915556c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f4855414dc7037b293dbb65a65021952e0445e00345cfe72abd2daf30f14e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lnhpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:38Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.353154 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.353212 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.353229 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.353252 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.353270 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:38Z","lastTransitionTime":"2025-10-01T06:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.373736 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:38Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.395119 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:38Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.412594 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:38Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.436047 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:38Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.455945 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.456295 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.456458 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.456609 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 
06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.456776 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:38Z","lastTransitionTime":"2025-10-01T06:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.474574 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-
dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e491
17b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:38Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.495556 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:38Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.519531 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:38Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.535546 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:38Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.553959 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:38Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.559566 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.559634 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.559657 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.559686 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.559706 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:38Z","lastTransitionTime":"2025-10-01T06:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.572632 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:38Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.611515 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:38Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.633787 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:38Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.648080 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6dbf0278-afc0-4d69-9c4d-4430d499077e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d005a72a4508464e46cf86c9c347b040b94e66243125197b1ecfc851e9775c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2379c4c720c1c540235a19746449bd80db698055cee11b03937a739fdbcf7a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://429399ca37c5afa42bbe8695a8fa5de760ec79ec7584b764f0b886acce46e770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:38Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.659519 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:38Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.662226 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.662276 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.662288 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.662308 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.662320 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:38Z","lastTransitionTime":"2025-10-01T06:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.671328 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:38Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.765448 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.765598 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.765630 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.765660 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.765684 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:38Z","lastTransitionTime":"2025-10-01T06:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.869436 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.869504 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.869520 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.869545 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.869565 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:38Z","lastTransitionTime":"2025-10-01T06:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.973368 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.973488 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.973508 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.973533 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:38 crc kubenswrapper[4747]: I1001 06:17:38.973549 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:38Z","lastTransitionTime":"2025-10-01T06:17:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.077065 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.077139 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.077156 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.077182 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.077200 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:39Z","lastTransitionTime":"2025-10-01T06:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.180628 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.180681 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.180698 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.180720 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.180739 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:39Z","lastTransitionTime":"2025-10-01T06:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.276069 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:39 crc kubenswrapper[4747]: E1001 06:17:39.276327 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.283481 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.283547 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.283571 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.283607 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.283633 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:39Z","lastTransitionTime":"2025-10-01T06:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.297171 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:39Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.327122 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:39Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.355434 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0
ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:39Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.377483 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:39Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.386451 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.386653 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.386784 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.386931 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.387043 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:39Z","lastTransitionTime":"2025-10-01T06:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.401478 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:39Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.418251 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:39Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.437565 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:39Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.455962 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:39Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.468877 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:39Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.484447 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:39Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.493446 4747 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.493509 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.493521 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.493544 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.493562 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:39Z","lastTransitionTime":"2025-10-01T06:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.503950 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:39Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.518659 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6dbf0278-afc0-4d69-9c4d-4430d499077e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d005a72a4508464e46cf86c9c347b040b94e66243125197b1ecfc851e9775c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2379c4c720c1c540235a19746449bd80db698055cee11b03937a739fdbcf7a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://429399ca37c5afa42bbe8695a8fa5de760ec79ec7584b764f0b886acce46e770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:39Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.533698 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:39Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.546901 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:39Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.562234 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.
d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:39Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.576112 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volume
Mounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:39Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.597669 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.597741 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.597791 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.597825 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.597842 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:39Z","lastTransitionTime":"2025-10-01T06:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.600063 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\" 06:17:25.451915 6360 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 06:17:25.451938 6360 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 06:17:25.451981 6360 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 06:17:25.451990 6360 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1001 06:17:25.452000 6360 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1001 06:17:25.452033 6360 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 06:17:25.452069 6360 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1001 06:17:25.452095 6360 handler.go:208] Removed *v1.Node event handler 7\\\\nI1001 06:17:25.452107 6360 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 06:17:25.452038 6360 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1001 06:17:25.452214 6360 factory.go:656] Stopping watch factory\\\\nI1001 06:17:25.452219 6360 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:25.452335 6360 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:25.452376 6360 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:39Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.615510 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00d8d6cd1ad3e44b5d6dba41765d85dad2c93af0cfda4d14693c7723915556c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f4855414dc7037b293dbb65a65021952e0445e00345cfe72abd2daf30f14e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lnhpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:39Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.700574 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.700674 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.700693 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.700716 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.700733 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:39Z","lastTransitionTime":"2025-10-01T06:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.804154 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.804223 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.804246 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.804276 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.804298 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:39Z","lastTransitionTime":"2025-10-01T06:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.907263 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.907303 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.907313 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.907329 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:39 crc kubenswrapper[4747]: I1001 06:17:39.907338 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:39Z","lastTransitionTime":"2025-10-01T06:17:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.010991 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.011429 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.011456 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.011485 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.011509 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:40Z","lastTransitionTime":"2025-10-01T06:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.113494 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.113549 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.113565 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.113597 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.113614 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:40Z","lastTransitionTime":"2025-10-01T06:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.216124 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.216168 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.216178 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.216194 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.216209 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:40Z","lastTransitionTime":"2025-10-01T06:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.276311 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.276320 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:40 crc kubenswrapper[4747]: E1001 06:17:40.276502 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.276346 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:40 crc kubenswrapper[4747]: E1001 06:17:40.276575 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:40 crc kubenswrapper[4747]: E1001 06:17:40.276650 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.318862 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.318919 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.318940 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.318963 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.318981 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:40Z","lastTransitionTime":"2025-10-01T06:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.422637 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.422739 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.422819 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.422859 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.422900 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:40Z","lastTransitionTime":"2025-10-01T06:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.525171 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.525236 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.525259 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.525285 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.525302 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:40Z","lastTransitionTime":"2025-10-01T06:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.627711 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.627814 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.627830 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.627845 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.627908 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:40Z","lastTransitionTime":"2025-10-01T06:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.729988 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.730053 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.730072 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.730096 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.730113 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:40Z","lastTransitionTime":"2025-10-01T06:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.832785 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.832885 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.832909 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.832940 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.832963 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:40Z","lastTransitionTime":"2025-10-01T06:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.936155 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.936228 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.936246 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.936270 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:40 crc kubenswrapper[4747]: I1001 06:17:40.936287 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:40Z","lastTransitionTime":"2025-10-01T06:17:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.038884 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.038950 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.038966 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.038990 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.039007 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:41Z","lastTransitionTime":"2025-10-01T06:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.141707 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.141832 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.141863 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.141895 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.141918 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:41Z","lastTransitionTime":"2025-10-01T06:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.244239 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.244344 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.244364 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.244399 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.244424 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:41Z","lastTransitionTime":"2025-10-01T06:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.276285 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:41 crc kubenswrapper[4747]: E1001 06:17:41.276531 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.347856 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.347987 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.348016 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.348049 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.348075 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:41Z","lastTransitionTime":"2025-10-01T06:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.451181 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.451242 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.451263 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.451289 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.451308 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:41Z","lastTransitionTime":"2025-10-01T06:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.554355 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.554415 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.554433 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.554458 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.554478 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:41Z","lastTransitionTime":"2025-10-01T06:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.657362 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.657395 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.657406 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.657421 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.657432 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:41Z","lastTransitionTime":"2025-10-01T06:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.761062 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.761108 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.761126 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.761147 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.761163 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:41Z","lastTransitionTime":"2025-10-01T06:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.864238 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.864283 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.864301 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.864324 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.864340 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:41Z","lastTransitionTime":"2025-10-01T06:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.966813 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.966862 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.966884 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.966913 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:41 crc kubenswrapper[4747]: I1001 06:17:41.966934 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:41Z","lastTransitionTime":"2025-10-01T06:17:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.069284 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.069328 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.069344 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.069365 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.069382 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:42Z","lastTransitionTime":"2025-10-01T06:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.172577 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.172617 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.172635 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.172656 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.172671 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:42Z","lastTransitionTime":"2025-10-01T06:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.276058 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.276229 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.276274 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.276291 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.276313 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.276328 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:42Z","lastTransitionTime":"2025-10-01T06:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:42 crc kubenswrapper[4747]: E1001 06:17:42.276675 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.276703 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:42 crc kubenswrapper[4747]: E1001 06:17:42.276810 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.276527 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:42 crc kubenswrapper[4747]: E1001 06:17:42.277163 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.379305 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.379359 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.379376 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.379398 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.379417 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:42Z","lastTransitionTime":"2025-10-01T06:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.482814 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.482860 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.482872 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.482889 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.482900 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:42Z","lastTransitionTime":"2025-10-01T06:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.585164 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.585239 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.585258 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.585282 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.585303 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:42Z","lastTransitionTime":"2025-10-01T06:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.688940 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.689016 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.689042 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.689068 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.689086 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:42Z","lastTransitionTime":"2025-10-01T06:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.792549 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.792597 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.792613 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.792636 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.792654 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:42Z","lastTransitionTime":"2025-10-01T06:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.898852 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.898903 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.898925 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.898953 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:42 crc kubenswrapper[4747]: I1001 06:17:42.898972 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:42Z","lastTransitionTime":"2025-10-01T06:17:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.004259 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.004312 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.004422 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.004439 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.004451 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:43Z","lastTransitionTime":"2025-10-01T06:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.106810 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.106863 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.106879 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.106900 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.106916 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:43Z","lastTransitionTime":"2025-10-01T06:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.209336 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.209377 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.209388 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.209404 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.209415 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:43Z","lastTransitionTime":"2025-10-01T06:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.276811 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:43 crc kubenswrapper[4747]: E1001 06:17:43.277012 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.311691 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.311816 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.311847 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.311879 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.311902 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:43Z","lastTransitionTime":"2025-10-01T06:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.414840 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.414882 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.414893 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.414909 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.414921 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:43Z","lastTransitionTime":"2025-10-01T06:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.519304 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.519355 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.519369 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.519389 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.519403 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:43Z","lastTransitionTime":"2025-10-01T06:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.622437 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.622515 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.622541 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.622574 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.622601 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:43Z","lastTransitionTime":"2025-10-01T06:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.725160 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.725251 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.725276 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.725309 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.725333 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:43Z","lastTransitionTime":"2025-10-01T06:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.828087 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.828152 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.828162 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.828176 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.828185 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:43Z","lastTransitionTime":"2025-10-01T06:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.930845 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.930887 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.930898 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.930916 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:43 crc kubenswrapper[4747]: I1001 06:17:43.930928 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:43Z","lastTransitionTime":"2025-10-01T06:17:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.033434 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.033464 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.033497 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.033510 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.033518 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:44Z","lastTransitionTime":"2025-10-01T06:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.136083 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.136146 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.136163 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.136188 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.136207 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:44Z","lastTransitionTime":"2025-10-01T06:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.238620 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.238658 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.238667 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.238680 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.238689 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:44Z","lastTransitionTime":"2025-10-01T06:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.275994 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:44 crc kubenswrapper[4747]: E1001 06:17:44.276107 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.276350 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:44 crc kubenswrapper[4747]: E1001 06:17:44.276420 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.276718 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:44 crc kubenswrapper[4747]: E1001 06:17:44.277072 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.341778 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.341834 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.341850 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.341873 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.341890 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:44Z","lastTransitionTime":"2025-10-01T06:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.444200 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.444255 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.444272 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.444295 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.444312 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:44Z","lastTransitionTime":"2025-10-01T06:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.547675 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.547772 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.547791 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.547817 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.547834 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:44Z","lastTransitionTime":"2025-10-01T06:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.651421 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.651481 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.651502 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.651530 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.651551 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:44Z","lastTransitionTime":"2025-10-01T06:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.757004 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.757356 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.757500 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.757645 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.757801 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:44Z","lastTransitionTime":"2025-10-01T06:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.860431 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.860531 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.860550 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.860573 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.860591 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:44Z","lastTransitionTime":"2025-10-01T06:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.962499 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.962596 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.962614 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.962639 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:44 crc kubenswrapper[4747]: I1001 06:17:44.962656 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:44Z","lastTransitionTime":"2025-10-01T06:17:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.064672 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.064801 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.064821 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.064844 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.064860 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:45Z","lastTransitionTime":"2025-10-01T06:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.167419 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.167452 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.167460 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.167475 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.167488 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:45Z","lastTransitionTime":"2025-10-01T06:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.270130 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.270169 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.270182 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.270199 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.270211 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:45Z","lastTransitionTime":"2025-10-01T06:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.276657 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:45 crc kubenswrapper[4747]: E1001 06:17:45.276890 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.372419 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.372496 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.372525 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.372559 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.372585 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:45Z","lastTransitionTime":"2025-10-01T06:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.475248 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.475316 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.475330 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.475346 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.475357 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:45Z","lastTransitionTime":"2025-10-01T06:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.579279 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.579333 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.579346 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.579366 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.579379 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:45Z","lastTransitionTime":"2025-10-01T06:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.682292 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.682361 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.682381 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.682410 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.682433 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:45Z","lastTransitionTime":"2025-10-01T06:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.785482 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.785607 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.785624 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.785643 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.785655 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:45Z","lastTransitionTime":"2025-10-01T06:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.887477 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.887527 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.887542 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.887557 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.887572 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:45Z","lastTransitionTime":"2025-10-01T06:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.989999 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.990042 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.990054 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.990068 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:45 crc kubenswrapper[4747]: I1001 06:17:45.990078 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:45Z","lastTransitionTime":"2025-10-01T06:17:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.092146 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.092240 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.092263 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.092292 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.092310 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:46Z","lastTransitionTime":"2025-10-01T06:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.194272 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.194348 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.194370 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.194399 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.194416 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:46Z","lastTransitionTime":"2025-10-01T06:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.276720 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:46 crc kubenswrapper[4747]: E1001 06:17:46.276949 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.276739 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:46 crc kubenswrapper[4747]: E1001 06:17:46.277070 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.276720 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:46 crc kubenswrapper[4747]: E1001 06:17:46.277159 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.297491 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.297552 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.297569 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.297593 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.297610 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:46Z","lastTransitionTime":"2025-10-01T06:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.350396 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.350459 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.350477 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.350503 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.350520 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:46Z","lastTransitionTime":"2025-10-01T06:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:46 crc kubenswrapper[4747]: E1001 06:17:46.367167 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:46Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.372339 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.372371 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.372382 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.372398 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.372409 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:46Z","lastTransitionTime":"2025-10-01T06:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:46 crc kubenswrapper[4747]: E1001 06:17:46.386084 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:46Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.391330 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.391376 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.391388 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.391407 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.391420 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:46Z","lastTransitionTime":"2025-10-01T06:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:46 crc kubenswrapper[4747]: E1001 06:17:46.406511 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:46Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.410702 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.410992 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.411157 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.411307 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.411464 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:46Z","lastTransitionTime":"2025-10-01T06:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:46 crc kubenswrapper[4747]: E1001 06:17:46.430999 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:46Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.435119 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.435187 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.435205 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.435232 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.435254 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:46Z","lastTransitionTime":"2025-10-01T06:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:46 crc kubenswrapper[4747]: E1001 06:17:46.453825 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:46Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:46 crc kubenswrapper[4747]: E1001 06:17:46.454037 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.455826 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.455875 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.455893 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.455915 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.455932 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:46Z","lastTransitionTime":"2025-10-01T06:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.558397 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.558487 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.558512 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.558544 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.558573 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:46Z","lastTransitionTime":"2025-10-01T06:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.661615 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.661677 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.661693 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.661718 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.661735 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:46Z","lastTransitionTime":"2025-10-01T06:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.763469 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.763530 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.763547 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.763570 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.763577 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pvmkj_e2f2b780-a19d-4581-92f4-ca25c69a263c/kube-multus/0.log" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.763587 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:46Z","lastTransitionTime":"2025-10-01T06:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.763619 4747 generic.go:334] "Generic (PLEG): container finished" podID="e2f2b780-a19d-4581-92f4-ca25c69a263c" containerID="2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d" exitCode=1 Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.763648 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pvmkj" event={"ID":"e2f2b780-a19d-4581-92f4-ca25c69a263c","Type":"ContainerDied","Data":"2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d"} Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.764020 4747 scope.go:117] "RemoveContainer" containerID="2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.779457 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:46Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.797759 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:46Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.809324 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:46Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.829081 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\
\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa8
7de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:46Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.844233 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:46Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.859864 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:46Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.872704 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.872774 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.872788 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.872804 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.872814 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:46Z","lastTransitionTime":"2025-10-01T06:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.880325 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:46Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.892073 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:46Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.911479 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"2025-10-01T06:17:00+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9fcbd893-f91f-4aa6-a352-7190db3fe27a\\\\n2025-10-01T06:17:00+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_9fcbd893-f91f-4aa6-a352-7190db3fe27a to /host/opt/cni/bin/\\\\n2025-10-01T06:17:01Z [verbose] multus-daemon started\\\\n2025-10-01T06:17:01Z [verbose] Readiness Indicator file check\\\\n2025-10-01T06:17:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the 
condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:46Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.925595 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6dbf0278-afc0-4d69-9c4d-4430d499077e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d005a72a4508464e46cf86c9c347b040b94e66243125197b1ecfc851e9775c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2379c4c720c1c540235a19746449bd80db698055cee11b03937a739fdbcf7a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://429399ca37c5afa42bbe8695a8fa5de760ec79ec7584b764f0b886acce46e770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:46Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.944043 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e81c4ac72cf421037ceec5148817af6ddd26141
9d083ec4854bd366e3c8d9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\" 06:17:25.451915 6360 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 06:17:25.451938 6360 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 06:17:25.451981 6360 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 06:17:25.451990 6360 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1001 06:17:25.452000 6360 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1001 06:17:25.452033 6360 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 06:17:25.452069 6360 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1001 06:17:25.452095 6360 handler.go:208] Removed *v1.Node event handler 7\\\\nI1001 06:17:25.452107 6360 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 06:17:25.452038 6360 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1001 06:17:25.452214 6360 factory.go:656] Stopping watch factory\\\\nI1001 06:17:25.452219 6360 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:25.452335 6360 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:25.452376 6360 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:46Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.956195 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00d8d6cd1ad3e44b5d6dba41765d85dad2c93af0cfda4d14693c7723915556c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f4855414dc7037b293dbb65a65021952e0445e00345cfe72abd2daf30f14e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lnhpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:46Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.973868 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:46Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.975443 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.975483 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.975495 4747 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.975512 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.975523 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:46Z","lastTransitionTime":"2025-10-01T06:17:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:46 crc kubenswrapper[4747]: I1001 06:17:46.989057 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"im
age\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:46Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.012195 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\
\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\
\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:47Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.028192 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:47Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.043410 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:47Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.056969 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:47Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.077873 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.077907 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.077921 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.077940 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.077952 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:47Z","lastTransitionTime":"2025-10-01T06:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.180585 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.180628 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.180638 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.180654 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.180666 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:47Z","lastTransitionTime":"2025-10-01T06:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.275892 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:47 crc kubenswrapper[4747]: E1001 06:17:47.276113 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.282711 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.282764 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.282776 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.282792 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.282803 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:47Z","lastTransitionTime":"2025-10-01T06:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.385216 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.385253 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.385264 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.385279 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.385290 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:47Z","lastTransitionTime":"2025-10-01T06:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.487310 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.487369 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.487392 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.487419 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.487442 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:47Z","lastTransitionTime":"2025-10-01T06:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.589639 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.589692 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.589700 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.589716 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.589727 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:47Z","lastTransitionTime":"2025-10-01T06:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.691813 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.691866 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.691881 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.691904 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.691922 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:47Z","lastTransitionTime":"2025-10-01T06:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.768688 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pvmkj_e2f2b780-a19d-4581-92f4-ca25c69a263c/kube-multus/0.log" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.768761 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pvmkj" event={"ID":"e2f2b780-a19d-4581-92f4-ca25c69a263c","Type":"ContainerStarted","Data":"9ddd1dfd830b83669b9797e0821bee7b4e6f0f46b87b6e6c315085b8a0bcbea5"} Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.788788 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:47Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.793567 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.793619 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.793635 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.793658 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.793675 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:47Z","lastTransitionTime":"2025-10-01T06:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.806913 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:47Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.825103 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:47Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.839906 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:47Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.858446 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:47Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.871573 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:47Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.886725 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6dbf0278-afc0-4d69-9c4d-4430d499077e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d005a72a4508464e46cf86c9c347b040b94e66243125197b1ecfc851e9775c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2379c4c720c1c540235a19746449bd80db698055cee11b03937a739fdbcf7a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://429399ca37c5afa42bbe8695a8fa5de760ec79ec7584b764f0b886acce46e770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:47Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.897834 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.897888 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.897905 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.897929 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.897946 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:47Z","lastTransitionTime":"2025-10-01T06:17:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.900299 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:47Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.915678 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:47Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.934921 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ddd1dfd830b83669b9797e0821bee7b4e6f0f46b87b6e6c315085b8a0bcbea5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"2025-10-01T06:17:00+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9fcbd893-f91f-4aa6-a352-7190db3fe27a\\\\n2025-10-01T06:17:00+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_9fcbd893-f91f-4aa6-a352-7190db3fe27a to /host/opt/cni/bin/\\\\n2025-10-01T06:17:01Z [verbose] multus-daemon started\\\\n2025-10-01T06:17:01Z [verbose] Readiness Indicator file check\\\\n2025-10-01T06:17:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:47Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.950609 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:47Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.978310 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\" 06:17:25.451915 6360 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 06:17:25.451938 6360 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 06:17:25.451981 6360 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 06:17:25.451990 6360 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1001 06:17:25.452000 6360 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1001 06:17:25.452033 6360 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 06:17:25.452069 6360 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1001 06:17:25.452095 6360 handler.go:208] Removed *v1.Node event handler 7\\\\nI1001 06:17:25.452107 6360 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 06:17:25.452038 6360 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1001 06:17:25.452214 6360 factory.go:656] Stopping watch factory\\\\nI1001 06:17:25.452219 6360 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:25.452335 6360 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:25.452376 6360 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:47Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:47 crc kubenswrapper[4747]: I1001 06:17:47.993040 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00d8d6cd1ad3e44b5d6dba41765d85dad2c93af0cfda4d14693c7723915556c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f4855414dc7037b293dbb65a65021952e0445e00345cfe72abd2daf30f14e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lnhpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:47Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.001520 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.001868 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.001886 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.001911 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.001931 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:48Z","lastTransitionTime":"2025-10-01T06:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.009349 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:48Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.028507 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0
ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:48Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.041110 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:48Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.051139 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:48Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.063340 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:48Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.104564 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.104588 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.104595 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.104609 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.104618 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:48Z","lastTransitionTime":"2025-10-01T06:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.207478 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.207513 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.207521 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.207535 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.207544 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:48Z","lastTransitionTime":"2025-10-01T06:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.276671 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.276709 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.276811 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:48 crc kubenswrapper[4747]: E1001 06:17:48.277416 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:48 crc kubenswrapper[4747]: E1001 06:17:48.277530 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:48 crc kubenswrapper[4747]: E1001 06:17:48.277424 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.309163 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.309214 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.309232 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.309255 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.309272 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:48Z","lastTransitionTime":"2025-10-01T06:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.412546 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.412599 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.412615 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.412637 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.412654 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:48Z","lastTransitionTime":"2025-10-01T06:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.514948 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.514986 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.514994 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.515007 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.515018 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:48Z","lastTransitionTime":"2025-10-01T06:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.617323 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.617568 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.617595 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.617624 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.617645 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:48Z","lastTransitionTime":"2025-10-01T06:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.720628 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.720690 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.720706 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.720728 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.720746 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:48Z","lastTransitionTime":"2025-10-01T06:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.824008 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.824056 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.824067 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.824085 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.824098 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:48Z","lastTransitionTime":"2025-10-01T06:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.926722 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.926819 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.926839 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.926864 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:48 crc kubenswrapper[4747]: I1001 06:17:48.926884 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:48Z","lastTransitionTime":"2025-10-01T06:17:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.030010 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.030056 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.030066 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.030080 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.030089 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:49Z","lastTransitionTime":"2025-10-01T06:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.132217 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.132272 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.132287 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.132307 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.132321 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:49Z","lastTransitionTime":"2025-10-01T06:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.234704 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.234851 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.234904 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.234927 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.234944 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:49Z","lastTransitionTime":"2025-10-01T06:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.276811 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:49 crc kubenswrapper[4747]: E1001 06:17:49.277269 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.292168 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6dbf0278-afc0-4d69-9c4d-4430d499077e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d005a72a4508464e46cf86c9c347b040b94e66243125197b1ecfc851e9775c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2379c4c720c1c540235a19746449bd80db698055cee11b03937a739fdbcf7a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://429399ca37c5afa42bbe8695a8fa5de760ec79ec7584b764f0b886acce46e770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:49Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.309850 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:49Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.324998 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:49Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.338338 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.338426 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.338449 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.338477 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.338501 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:49Z","lastTransitionTime":"2025-10-01T06:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.345094 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ddd1dfd830b83669b9797e0821bee7b4e6f0f46b87b6e6c315085b8a0bcbea5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"2025-10-01T06:17:00+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9fcbd893-f91f-4aa6-a352-7190db3fe27a\\\\n2025-10-01T06:17:00+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_9fcbd893-f91f-4aa6-a352-7190db3fe27a to /host/opt/cni/bin/\\\\n2025-10-01T06:17:01Z [verbose] multus-daemon started\\\\n2025-10-01T06:17:01Z 
[verbose] Readiness Indicator file check\\\\n2025-10-01T06:17:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:49Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.362628 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:49Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.396849 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\" 06:17:25.451915 6360 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 06:17:25.451938 6360 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 06:17:25.451981 6360 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 06:17:25.451990 6360 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1001 06:17:25.452000 6360 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1001 06:17:25.452033 6360 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 06:17:25.452069 6360 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1001 06:17:25.452095 6360 handler.go:208] Removed *v1.Node event handler 7\\\\nI1001 06:17:25.452107 6360 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 06:17:25.452038 6360 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1001 06:17:25.452214 6360 factory.go:656] Stopping watch factory\\\\nI1001 06:17:25.452219 6360 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:25.452335 6360 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:25.452376 6360 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:49Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.417884 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00d8d6cd1ad3e44b5d6dba41765d85dad2c93af0cfda4d14693c7723915556c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f4855414dc7037b293dbb65a65021952e0445e00345cfe72abd2daf30f14e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lnhpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:49Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.438142 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:49Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.440677 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.440732 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.440783 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.440807 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 
06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.440822 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:49Z","lastTransitionTime":"2025-10-01T06:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.467963 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-
dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e491
17b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:49Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.481109 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:49Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.495402 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:49Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.510123 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:49Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.523121 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:49Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.535403 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:49Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.545118 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.545174 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.545194 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.545219 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.545237 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:49Z","lastTransitionTime":"2025-10-01T06:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.547270 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:49Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.558725 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:49Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.575130 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:49Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.588436 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:49Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.648544 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.648609 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.648623 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.648641 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.648653 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:49Z","lastTransitionTime":"2025-10-01T06:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.750537 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.750581 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.750594 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.750610 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.750622 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:49Z","lastTransitionTime":"2025-10-01T06:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.853433 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.853488 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.853509 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.853534 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.853551 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:49Z","lastTransitionTime":"2025-10-01T06:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.955692 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.955794 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.955817 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.955848 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:49 crc kubenswrapper[4747]: I1001 06:17:49.955872 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:49Z","lastTransitionTime":"2025-10-01T06:17:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.058127 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.058167 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.058177 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.058192 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.058206 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:50Z","lastTransitionTime":"2025-10-01T06:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.161557 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.161592 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.161600 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.161614 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.161622 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:50Z","lastTransitionTime":"2025-10-01T06:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.263741 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.263865 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.263884 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.263906 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.263921 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:50Z","lastTransitionTime":"2025-10-01T06:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.276181 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.276265 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:50 crc kubenswrapper[4747]: E1001 06:17:50.276433 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.276527 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:50 crc kubenswrapper[4747]: E1001 06:17:50.276645 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:50 crc kubenswrapper[4747]: E1001 06:17:50.276701 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.366200 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.366242 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.366251 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.366265 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.366274 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:50Z","lastTransitionTime":"2025-10-01T06:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.468747 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.468832 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.468852 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.468875 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.468893 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:50Z","lastTransitionTime":"2025-10-01T06:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.574529 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.574587 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.574604 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.574626 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.574644 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:50Z","lastTransitionTime":"2025-10-01T06:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.677828 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.677872 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.677882 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.677897 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.677907 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:50Z","lastTransitionTime":"2025-10-01T06:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.781651 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.781681 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.781689 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.781701 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.781709 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:50Z","lastTransitionTime":"2025-10-01T06:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.884198 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.884234 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.884242 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.884256 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.884264 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:50Z","lastTransitionTime":"2025-10-01T06:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.987190 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.987242 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.987253 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.987270 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:50 crc kubenswrapper[4747]: I1001 06:17:50.987281 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:50Z","lastTransitionTime":"2025-10-01T06:17:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.090190 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.090261 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.090278 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.090691 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.090742 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:51Z","lastTransitionTime":"2025-10-01T06:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.193877 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.193949 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.193971 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.194002 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.194022 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:51Z","lastTransitionTime":"2025-10-01T06:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.276148 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:51 crc kubenswrapper[4747]: E1001 06:17:51.276288 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.297409 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.297443 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.297451 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.297465 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.297477 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:51Z","lastTransitionTime":"2025-10-01T06:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.400717 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.401921 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.401981 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.402002 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.402014 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:51Z","lastTransitionTime":"2025-10-01T06:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.504544 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.504581 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.504590 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.504605 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.504617 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:51Z","lastTransitionTime":"2025-10-01T06:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.607128 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.607181 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.607195 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.607218 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.607231 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:51Z","lastTransitionTime":"2025-10-01T06:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.710279 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.710609 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.710621 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.710636 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.710647 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:51Z","lastTransitionTime":"2025-10-01T06:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.813484 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.813544 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.813556 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.813574 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.813587 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:51Z","lastTransitionTime":"2025-10-01T06:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.915413 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.915697 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.915816 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.915914 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:51 crc kubenswrapper[4747]: I1001 06:17:51.916015 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:51Z","lastTransitionTime":"2025-10-01T06:17:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.018334 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.018388 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.018397 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.018412 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.018421 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:52Z","lastTransitionTime":"2025-10-01T06:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.121845 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.122196 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.122326 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.122485 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.122617 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:52Z","lastTransitionTime":"2025-10-01T06:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.225165 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.225504 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.225642 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.225810 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.225932 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:52Z","lastTransitionTime":"2025-10-01T06:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.276099 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.276264 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:52 crc kubenswrapper[4747]: E1001 06:17:52.276594 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.276311 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:52 crc kubenswrapper[4747]: E1001 06:17:52.276669 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:52 crc kubenswrapper[4747]: E1001 06:17:52.276477 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.329171 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.329243 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.329265 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.329294 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.329314 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:52Z","lastTransitionTime":"2025-10-01T06:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.431545 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.431588 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.431598 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.431615 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.431628 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:52Z","lastTransitionTime":"2025-10-01T06:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.534509 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.534864 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.535019 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.535175 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.535302 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:52Z","lastTransitionTime":"2025-10-01T06:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.638291 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.638351 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.638369 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.638392 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.638411 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:52Z","lastTransitionTime":"2025-10-01T06:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.740597 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.740640 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.740651 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.740668 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.740680 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:52Z","lastTransitionTime":"2025-10-01T06:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.842872 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.842911 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.842921 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.842937 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.842946 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:52Z","lastTransitionTime":"2025-10-01T06:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.945326 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.945388 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.945404 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.945428 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:52 crc kubenswrapper[4747]: I1001 06:17:52.945446 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:52Z","lastTransitionTime":"2025-10-01T06:17:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.048404 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.048438 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.048450 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.048466 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.048475 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:53Z","lastTransitionTime":"2025-10-01T06:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.150899 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.150992 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.151019 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.151055 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.151079 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:53Z","lastTransitionTime":"2025-10-01T06:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.252703 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.252742 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.252766 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.252782 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.252791 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:53Z","lastTransitionTime":"2025-10-01T06:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.276471 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:53 crc kubenswrapper[4747]: E1001 06:17:53.277089 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.277558 4747 scope.go:117] "RemoveContainer" containerID="5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.356441 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.356773 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.356882 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.356978 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.357118 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:53Z","lastTransitionTime":"2025-10-01T06:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.459561 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.459600 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.459611 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.459629 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.459640 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:53Z","lastTransitionTime":"2025-10-01T06:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.562355 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.562389 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.562401 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.562417 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.562427 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:53Z","lastTransitionTime":"2025-10-01T06:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.666023 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.666082 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.666095 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.666111 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.666123 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:53Z","lastTransitionTime":"2025-10-01T06:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.769448 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.769493 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.769509 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.769531 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.769547 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:53Z","lastTransitionTime":"2025-10-01T06:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.793236 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p48hw_38849139-d385-42a4-adab-687566065973/ovnkube-controller/2.log" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.796197 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerStarted","Data":"820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497"} Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.797859 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.812969 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6dbf0278-afc0-4d69-9c4d-4430d499077e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d005a72a4508464e46cf86c9c347b040b94e66243125197b1ecfc851e9775c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2379c4c720c1c540235a19746449bd80db698055cee11b03937a739fdbcf7a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://429399ca37c5afa42bbe8695a8fa5de760ec
79ec7584b764f0b886acce46e770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:53Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.827168 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:53Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.838002 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:53Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.856865 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ddd1dfd830b83669b9797e0821bee7b4e6f0f46b87b6e6c315085b8a0bcbea5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"2025-10-01T06:17:00+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9fcbd893-f91f-4aa6-a352-7190db3fe27a\\\\n2025-10-01T06:17:00+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_9fcbd893-f91f-4aa6-a352-7190db3fe27a to /host/opt/cni/bin/\\\\n2025-10-01T06:17:01Z [verbose] multus-daemon started\\\\n2025-10-01T06:17:01Z [verbose] Readiness Indicator file check\\\\n2025-10-01T06:17:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:53Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.872627 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.872686 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.872704 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.872727 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.872745 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:53Z","lastTransitionTime":"2025-10-01T06:17:53Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.873718 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:53Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 
06:17:53.895482 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\
\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d77325745
3265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\" 06:17:25.451915 6360 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 06:17:25.451938 6360 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 06:17:25.451981 6360 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 06:17:25.451990 6360 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1001 06:17:25.452000 6360 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1001 06:17:25.452033 6360 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 06:17:25.452069 6360 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1001 06:17:25.452095 6360 handler.go:208] Removed *v1.Node event handler 7\\\\nI1001 06:17:25.452107 6360 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 06:17:25.452038 6360 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1001 06:17:25.452214 6360 factory.go:656] Stopping watch factory\\\\nI1001 06:17:25.452219 6360 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:25.452335 6360 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:25.452376 6360 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:53Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.908679 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00d8d6cd1ad3e44b5d6dba41765d85dad2c93af0cfda4d14693c7723915556c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f4855414dc7037b293dbb65a65021952e0445e00345cfe72abd2daf30f14e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lnhpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:53Z is after 2025-08-24T17:21:41Z" Oct 01 
06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.929046 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:53Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.960236 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0
ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:53Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.974866 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:53Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.975599 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.975706 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.975844 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.975959 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.976072 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:53Z","lastTransitionTime":"2025-10-01T06:17:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:53 crc kubenswrapper[4747]: I1001 06:17:53.986695 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:53Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.002151 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:53Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.016480 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:54Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.032964 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:54Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.048368 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:54Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.061082 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:54Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.078470 4747 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.078509 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.078523 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.078541 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.078554 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:54Z","lastTransitionTime":"2025-10-01T06:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.080952 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:54Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.091635 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-01T06:17:54Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.181273 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.181339 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.181361 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.181392 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.181413 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:54Z","lastTransitionTime":"2025-10-01T06:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.275696 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.275828 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.276193 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:54 crc kubenswrapper[4747]: E1001 06:17:54.276429 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:54 crc kubenswrapper[4747]: E1001 06:17:54.276518 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:54 crc kubenswrapper[4747]: E1001 06:17:54.276626 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.283948 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.284001 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.284025 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.284055 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.284079 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:54Z","lastTransitionTime":"2025-10-01T06:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.386297 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.386353 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.386378 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.386404 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.386424 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:54Z","lastTransitionTime":"2025-10-01T06:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.490521 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.490605 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.490627 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.490654 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.490684 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:54Z","lastTransitionTime":"2025-10-01T06:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.594473 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.594535 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.594560 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.594591 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.594612 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:54Z","lastTransitionTime":"2025-10-01T06:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.697429 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.697958 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.698047 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.698115 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.698323 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:54Z","lastTransitionTime":"2025-10-01T06:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.801501 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.802031 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.802101 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.802183 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.802253 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:54Z","lastTransitionTime":"2025-10-01T06:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.803055 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p48hw_38849139-d385-42a4-adab-687566065973/ovnkube-controller/3.log" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.804741 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p48hw_38849139-d385-42a4-adab-687566065973/ovnkube-controller/2.log" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.808574 4747 generic.go:334] "Generic (PLEG): container finished" podID="38849139-d385-42a4-adab-687566065973" containerID="820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497" exitCode=1 Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.808624 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerDied","Data":"820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497"} Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.808781 4747 scope.go:117] "RemoveContainer" containerID="5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.810700 4747 scope.go:117] "RemoveContainer" containerID="820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497" Oct 01 06:17:54 crc kubenswrapper[4747]: E1001 06:17:54.811055 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" podUID="38849139-d385-42a4-adab-687566065973" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.828247 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:54Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.851475 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:54Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.863964 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:54Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.883081 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:54Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.901841 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:54Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.904854 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.904888 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.904900 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.904917 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.904931 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:54Z","lastTransitionTime":"2025-10-01T06:17:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.919527 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:54Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.938881 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ddd1dfd830b83669b9797e0821bee7b4e6f0f46b87b6e6c315085b8a0bcbea5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"2025-10-01T06:17:00+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9fcbd893-f91f-4aa6-a352-7190db3fe27a\\\\n2025-10-01T06:17:00+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_9fcbd893-f91f-4aa6-a352-7190db3fe27a to /host/opt/cni/bin/\\\\n2025-10-01T06:17:01Z [verbose] multus-daemon started\\\\n2025-10-01T06:17:01Z [verbose] Readiness Indicator file check\\\\n2025-10-01T06:17:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:54Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.957400 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6dbf0278-afc0-4d69-9c4d-4430d499077e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d005a72a4508464e46cf86c9c347b040b94e66243125197b1ecfc851e9775c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2379c4c720c1c540235a19746449bd80db698055cee11b03937a739fdbcf7a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://429399ca37c5afa42bbe8695a8fa5de760ec79ec7584b764f0b886acce46e770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:54Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.974130 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:54Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:54 crc kubenswrapper[4747]: I1001 06:17:54.988231 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:54Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.007104 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:55Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.007830 
4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.008042 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.008187 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.008327 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.008490 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:55Z","lastTransitionTime":"2025-10-01T06:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.046170 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://820cfbb14222db2849843e2a264a620b6d820df6
b2b11483d0795dfe5fc58497\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e81c4ac72cf421037ceec5148817af6ddd261419d083ec4854bd366e3c8d9d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:25Z\\\",\\\"message\\\":\\\" 06:17:25.451915 6360 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1001 06:17:25.451938 6360 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1001 06:17:25.451981 6360 handler.go:208] Removed *v1.Node event handler 2\\\\nI1001 06:17:25.451990 6360 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1001 06:17:25.452000 6360 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1001 06:17:25.452033 6360 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1001 06:17:25.452069 6360 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1001 06:17:25.452095 6360 handler.go:208] Removed *v1.Node event handler 7\\\\nI1001 06:17:25.452107 6360 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1001 06:17:25.452038 6360 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1001 06:17:25.452214 6360 factory.go:656] Stopping watch factory\\\\nI1001 06:17:25.452219 6360 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:25.452335 6360 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1001 06:17:25.452376 6360 handler.go:208] Removed *v1.EgressFirewall ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:24Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:54Z\\\",\\\"message\\\":\\\"df9-690dbab310cb}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 06:17:54.211592 6714 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-config-operator/metrics]} name:Service_openshift-config-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.161:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f32857b5-f652-4313-a0d7-455c3156dd99}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 06:17:54.211681 6714 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-config-operator/metrics]} name:Service_openshift-config-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none 
reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.161:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f32857b5-f652-4313-a0d7-455c3156dd99}] Until: Durable:\\\\u003cnil\\\\u003e\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11
\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:55Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.063063 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00d8d6cd1ad3e44b5d6dba41765d85dad2c93af0cfda4d14693c7723915556c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f4855414dc7037b293dbb65a65021952e0445e00345cfe72abd2daf30f14e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lnhpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:55Z is after 2025-08-24T17:21:41Z" Oct 01 
06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.085785 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:55Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.105399 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:55Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.111662 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.111886 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.112026 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.112182 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.112340 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:55Z","lastTransitionTime":"2025-10-01T06:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.121901 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:55Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.140402 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:55Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.173279 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0
ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:55Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.215554 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.215604 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.215619 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.215641 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.215657 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:55Z","lastTransitionTime":"2025-10-01T06:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.276030 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:55 crc kubenswrapper[4747]: E1001 06:17:55.276263 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.289792 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.319058 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.319119 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.319137 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.319165 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.319184 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:55Z","lastTransitionTime":"2025-10-01T06:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.421909 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.421984 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.422011 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.422042 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.422070 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:55Z","lastTransitionTime":"2025-10-01T06:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.525132 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.525205 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.525229 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.525258 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.525281 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:55Z","lastTransitionTime":"2025-10-01T06:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.628490 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.628564 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.628586 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.628615 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.628640 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:55Z","lastTransitionTime":"2025-10-01T06:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.731928 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.731990 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.732012 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.732040 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.732061 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:55Z","lastTransitionTime":"2025-10-01T06:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.814255 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p48hw_38849139-d385-42a4-adab-687566065973/ovnkube-controller/3.log" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.819928 4747 scope.go:117] "RemoveContainer" containerID="820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497" Oct 01 06:17:55 crc kubenswrapper[4747]: E1001 06:17:55.820080 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" podUID="38849139-d385-42a4-adab-687566065973" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.835498 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.835614 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.835630 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.835648 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.835678 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:55Z","lastTransitionTime":"2025-10-01T06:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.836069 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:55Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.851191 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eb46590-a412-4269-9c1d-bca1fac316a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f5e8ba6a4048e7b3b7609ed4aa459e35520f9de4ed46ad68d1de6ad7c41746e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0e6c0f45945df898ef2838d574a0a1bad9dafb6a6608f336e1c92656d7018b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0e6c0f45945df898ef2838d574a0a1bad9dafb6a6608f336e1c92656d7018b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:55Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.870676 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:55Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.897010 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0
ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:55Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.917708 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:55Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.936868 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:55Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.939309 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.939360 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.939377 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.939401 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.939418 4747 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:55Z","lastTransitionTime":"2025-10-01T06:17:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.951888 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:55Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.970268 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:55Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:55 crc kubenswrapper[4747]: I1001 06:17:55.990371 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:55Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.008859 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:56Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.026941 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:56Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.042001 4747 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.042061 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.042078 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.042104 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.042120 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:56Z","lastTransitionTime":"2025-10-01T06:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.045909 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2
c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:56Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.062871 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6dbf0278-afc0-4d69-9c4d-4430d499077e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d005a72a4508464e46cf86c9c347b040b94e66243125197b1ecfc851e9775c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2379c4c720c1c540235a19746449bd80db698055cee11b03937a739fdbcf7a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://429399ca37c5afa42bbe8695a8fa5de760ec79ec7584b764f0b886acce46e770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:56Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.080931 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:56Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.093843 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:56Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.112107 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ddd1dfd830b83669b9797e0821bee7b4e6f0f46b87b6e6c315085b8a0bcbea5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"2025-10-01T06:17:00+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9fcbd893-f91f-4aa6-a352-7190db3fe27a\\\\n2025-10-01T06:17:00+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_9fcbd893-f91f-4aa6-a352-7190db3fe27a to /host/opt/cni/bin/\\\\n2025-10-01T06:17:01Z [verbose] multus-daemon started\\\\n2025-10-01T06:17:01Z [verbose] Readiness Indicator file check\\\\n2025-10-01T06:17:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:56Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.128332 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:56Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.145462 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.145695 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.145782 4747 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.145861 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.145925 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:56Z","lastTransitionTime":"2025-10-01T06:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.148783 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://820cfbb14222db2849843e2a264a620b6d820df6
b2b11483d0795dfe5fc58497\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:54Z\\\",\\\"message\\\":\\\"df9-690dbab310cb}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 06:17:54.211592 6714 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-config-operator/metrics]} name:Service_openshift-config-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.161:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f32857b5-f652-4313-a0d7-455c3156dd99}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 06:17:54.211681 6714 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-config-operator/metrics]} name:Service_openshift-config-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.161:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f32857b5-f652-4313-a0d7-455c3156dd99}] Until: Durable:\\\\u003cnil\\\\u003e\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:56Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.167186 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00d8d6cd1ad3e44b5d6dba41765d85dad2c93af0cfda4d14693c7723915556c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f4855414dc7037b293dbb65a65021952e0445e00345cfe72abd2daf30f14e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lnhpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:56Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.248521 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.248589 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.248615 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.248638 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.248655 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:56Z","lastTransitionTime":"2025-10-01T06:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.275732 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.275797 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:56 crc kubenswrapper[4747]: E1001 06:17:56.275921 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:56 crc kubenswrapper[4747]: E1001 06:17:56.276125 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.276416 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:56 crc kubenswrapper[4747]: E1001 06:17:56.276710 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.351802 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.351871 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.351890 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.351915 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.351933 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:56Z","lastTransitionTime":"2025-10-01T06:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.455068 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.455118 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.455135 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.455157 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.455174 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:56Z","lastTransitionTime":"2025-10-01T06:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.558373 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.558433 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.558456 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.558485 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.558509 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:56Z","lastTransitionTime":"2025-10-01T06:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.662036 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.662110 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.662135 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.662170 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.662195 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:56Z","lastTransitionTime":"2025-10-01T06:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.764775 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.765047 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.765117 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.765191 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.765253 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:56Z","lastTransitionTime":"2025-10-01T06:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.795602 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.795630 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.795639 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.795653 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.795662 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:56Z","lastTransitionTime":"2025-10-01T06:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:56 crc kubenswrapper[4747]: E1001 06:17:56.807182 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:56Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.810442 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.810480 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.810491 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.810515 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.810530 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:56Z","lastTransitionTime":"2025-10-01T06:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:56 crc kubenswrapper[4747]: E1001 06:17:56.823003 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:56Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.826118 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.826284 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.826402 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.826500 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.826589 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:56Z","lastTransitionTime":"2025-10-01T06:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:56 crc kubenswrapper[4747]: E1001 06:17:56.845990 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:56Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.849675 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.849727 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.849818 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.849856 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.849877 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:56Z","lastTransitionTime":"2025-10-01T06:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:56 crc kubenswrapper[4747]: E1001 06:17:56.866852 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:56Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.871680 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.871823 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.871850 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.871893 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.871916 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:56Z","lastTransitionTime":"2025-10-01T06:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:56 crc kubenswrapper[4747]: E1001 06:17:56.895400 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:56Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:56 crc kubenswrapper[4747]: E1001 06:17:56.895623 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.898406 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.898456 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.898474 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.898497 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:56 crc kubenswrapper[4747]: I1001 06:17:56.898514 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:56Z","lastTransitionTime":"2025-10-01T06:17:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.002160 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.002227 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.002246 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.002272 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.002291 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:57Z","lastTransitionTime":"2025-10-01T06:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.105406 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.105475 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.105495 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.105521 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.105540 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:57Z","lastTransitionTime":"2025-10-01T06:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.208255 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.208327 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.208350 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.208384 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.208406 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:57Z","lastTransitionTime":"2025-10-01T06:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.276567 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:57 crc kubenswrapper[4747]: E1001 06:17:57.276799 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.311609 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.311682 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.311708 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.311738 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.311801 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:57Z","lastTransitionTime":"2025-10-01T06:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.415563 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.415622 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.415641 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.415667 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.415684 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:57Z","lastTransitionTime":"2025-10-01T06:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.519619 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.519676 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.519693 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.519724 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.519744 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:57Z","lastTransitionTime":"2025-10-01T06:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.623340 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.623381 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.623391 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.623408 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.623419 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:57Z","lastTransitionTime":"2025-10-01T06:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.727954 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.728013 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.728031 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.728055 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.728072 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:57Z","lastTransitionTime":"2025-10-01T06:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.831080 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.831948 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.832001 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.832028 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.832048 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:57Z","lastTransitionTime":"2025-10-01T06:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.935230 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.935307 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.935332 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.935366 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:57 crc kubenswrapper[4747]: I1001 06:17:57.935390 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:57Z","lastTransitionTime":"2025-10-01T06:17:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.038571 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.038654 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.038675 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.038704 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.038724 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:58Z","lastTransitionTime":"2025-10-01T06:17:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.141718 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.141782 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.141794 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.141808 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.141816 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:58Z","lastTransitionTime":"2025-10-01T06:17:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.243876 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.243909 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.243919 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.243934 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.243946 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:58Z","lastTransitionTime":"2025-10-01T06:17:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.275659 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.275715 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:17:58 crc kubenswrapper[4747]: E1001 06:17:58.275810 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:17:58 crc kubenswrapper[4747]: E1001 06:17:58.275914 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.275985 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:17:58 crc kubenswrapper[4747]: E1001 06:17:58.276096 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.346599 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.346666 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.346680 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.346697 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.346709 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:58Z","lastTransitionTime":"2025-10-01T06:17:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.449919 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.449967 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.449978 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.449998 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.450010 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:58Z","lastTransitionTime":"2025-10-01T06:17:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.553515 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.553579 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.553596 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.553628 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.553645 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:58Z","lastTransitionTime":"2025-10-01T06:17:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.656919 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.656973 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.656990 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.657013 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.657031 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:58Z","lastTransitionTime":"2025-10-01T06:17:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.760046 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.760113 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.760132 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.760158 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.760176 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:58Z","lastTransitionTime":"2025-10-01T06:17:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.863183 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.863850 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.863973 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.864069 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.864157 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:58Z","lastTransitionTime":"2025-10-01T06:17:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.966937 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.967279 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.967399 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.967520 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:58 crc kubenswrapper[4747]: I1001 06:17:58.967639 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:58Z","lastTransitionTime":"2025-10-01T06:17:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.070862 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.070901 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.070911 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.070930 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.070940 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:59Z","lastTransitionTime":"2025-10-01T06:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.175253 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.175744 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.176283 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.176745 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.176921 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:59Z","lastTransitionTime":"2025-10-01T06:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.276277 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:17:59 crc kubenswrapper[4747]: E1001 06:17:59.276454 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.280687 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.280746 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.280793 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.280815 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.280830 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:59Z","lastTransitionTime":"2025-10-01T06:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.296168 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.312321 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.331160 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.354585 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.369675 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.383622 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.383671 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.383687 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.383708 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.383725 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:59Z","lastTransitionTime":"2025-10-01T06:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.388067 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.407085 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.420924 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.441620 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ddd1dfd830b83669b9797e0821bee7b4e6f0f46b87b6e6c315085b8a0bcbea5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"2025-10-01T06:17:00+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9fcbd893-f91f-4aa6-a352-7190db3fe27a\\\\n2025-10-01T06:17:00+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_9fcbd893-f91f-4aa6-a352-7190db3fe27a to /host/opt/cni/bin/\\\\n2025-10-01T06:17:01Z [verbose] multus-daemon started\\\\n2025-10-01T06:17:01Z [verbose] Readiness Indicator file check\\\\n2025-10-01T06:17:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.463057 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6dbf0278-afc0-4d69-9c4d-4430d499077e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d005a72a4508464e46cf86c9c347b040b94e66243125197b1ecfc851e9775c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2379c4c720c1c540235a19746449bd80db698055cee11b03937a739fdbcf7a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://429399ca37c5afa42bbe8695a8fa5de760ec79ec7584b764f0b886acce46e770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.486043 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.486088 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.486102 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.486121 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.486135 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:59Z","lastTransitionTime":"2025-10-01T06:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.496090 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:54Z\\\",\\\"message\\\":\\\"df9-690dbab310cb}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 06:17:54.211592 6714 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-config-operator/metrics]} name:Service_openshift-config-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.161:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f32857b5-f652-4313-a0d7-455c3156dd99}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 06:17:54.211681 6714 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-config-operator/metrics]} name:Service_openshift-config-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.161:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f32857b5-f652-4313-a0d7-455c3156dd99}] Until: 
Durable:\\\\u003cnil\\\\u003e\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readO
nly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.516286 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00d8d6cd1ad3e44b5d6dba41765d85dad2c93af0cfda4d14693c7723915556c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f4855414dc7037b293dbb65a65021952e0445e00345cfe72abd2daf30f14e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lnhpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:59Z is after 2025-08-24T17:21:41Z" Oct 01 
06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.535789 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.555727 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.586102 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0
ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.588450 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.588485 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.588498 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.588515 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.588527 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:59Z","lastTransitionTime":"2025-10-01T06:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.608402 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.629375 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.644991 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.661604 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eb46590-a412-4269-9c1d-bca1fac316a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f5e8ba6a4048e7b3b7609ed4aa459e35520f9de4ed46ad68d1de6ad7c41746e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0e6c0f45945df898ef2838d574a0a1bad9dafb6a6608f336e1c92656d7018b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0e6c0f45945df898ef2838d574a0a1bad9dafb6a6608f336e1c92656d7018b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:17:59Z is after 2025-08-24T17:21:41Z" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.692097 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.692177 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.692195 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.692218 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.692234 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:59Z","lastTransitionTime":"2025-10-01T06:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.794398 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.794452 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.794469 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.794490 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.794507 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:59Z","lastTransitionTime":"2025-10-01T06:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.897524 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.898885 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.899075 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.899243 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:17:59 crc kubenswrapper[4747]: I1001 06:17:59.899394 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:17:59Z","lastTransitionTime":"2025-10-01T06:17:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.002682 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.002797 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.002825 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.002853 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.002875 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:00Z","lastTransitionTime":"2025-10-01T06:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.106155 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.106221 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.106240 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.106267 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.106285 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:00Z","lastTransitionTime":"2025-10-01T06:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.209220 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.209734 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.209985 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.210167 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.210305 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:00Z","lastTransitionTime":"2025-10-01T06:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.276538 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.276535 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.277072 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:00 crc kubenswrapper[4747]: E1001 06:18:00.277424 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:00 crc kubenswrapper[4747]: E1001 06:18:00.277720 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:00 crc kubenswrapper[4747]: E1001 06:18:00.277942 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.313475 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.313556 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.313581 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.313613 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.313636 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:00Z","lastTransitionTime":"2025-10-01T06:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.417233 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.418223 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.418663 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.419009 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.419220 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:00Z","lastTransitionTime":"2025-10-01T06:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.523425 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.523867 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.524035 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.524171 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.524303 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:00Z","lastTransitionTime":"2025-10-01T06:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.627679 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.628068 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.628086 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.628113 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.628134 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:00Z","lastTransitionTime":"2025-10-01T06:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.730614 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.730683 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.730707 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.730740 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.730829 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:00Z","lastTransitionTime":"2025-10-01T06:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.835177 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.835501 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.835687 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.835913 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.836076 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:00Z","lastTransitionTime":"2025-10-01T06:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.939219 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.939268 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.939285 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.939308 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:00 crc kubenswrapper[4747]: I1001 06:18:00.939327 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:00Z","lastTransitionTime":"2025-10-01T06:18:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.043175 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.043285 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.043311 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.043413 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.043475 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:01Z","lastTransitionTime":"2025-10-01T06:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.146932 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.147004 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.147028 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.147058 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.147080 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:01Z","lastTransitionTime":"2025-10-01T06:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.250290 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.250366 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.250390 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.250419 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.250440 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:01Z","lastTransitionTime":"2025-10-01T06:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.276233 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:01 crc kubenswrapper[4747]: E1001 06:18:01.276446 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.353744 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.353857 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.353879 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.353910 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.353928 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:01Z","lastTransitionTime":"2025-10-01T06:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.456664 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.456720 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.456734 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.456780 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.456795 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:01Z","lastTransitionTime":"2025-10-01T06:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.560405 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.560714 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.560895 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.561146 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.561359 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:01Z","lastTransitionTime":"2025-10-01T06:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.664450 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.664505 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.664522 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.664544 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.664560 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:01Z","lastTransitionTime":"2025-10-01T06:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.767628 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.768160 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.768448 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.768816 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.769047 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:01Z","lastTransitionTime":"2025-10-01T06:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.871930 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.871999 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.872024 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.872052 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.872076 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:01Z","lastTransitionTime":"2025-10-01T06:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.975609 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.975660 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.975677 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.975701 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:01 crc kubenswrapper[4747]: I1001 06:18:01.975722 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:01Z","lastTransitionTime":"2025-10-01T06:18:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.079331 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.079393 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.079415 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.079443 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.079479 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:02Z","lastTransitionTime":"2025-10-01T06:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.183185 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.183255 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.183277 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.183305 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.183332 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:02Z","lastTransitionTime":"2025-10-01T06:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.276810 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.276862 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.276856 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:02 crc kubenswrapper[4747]: E1001 06:18:02.277019 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:02 crc kubenswrapper[4747]: E1001 06:18:02.277128 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:02 crc kubenswrapper[4747]: E1001 06:18:02.277298 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.286039 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.286103 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.286128 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.286158 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.286182 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:02Z","lastTransitionTime":"2025-10-01T06:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.388924 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.388985 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.389002 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.389025 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.389045 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:02Z","lastTransitionTime":"2025-10-01T06:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.493008 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.493072 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.493090 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.493116 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.493137 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:02Z","lastTransitionTime":"2025-10-01T06:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.548659 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:18:02 crc kubenswrapper[4747]: E1001 06:18:02.548929 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:06.548901425 +0000 UTC m=+147.958558504 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.595218 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.595260 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.595270 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.595284 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.595295 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:02Z","lastTransitionTime":"2025-10-01T06:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.650553 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.650610 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.650642 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.650671 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:02 crc kubenswrapper[4747]: E1001 06:18:02.650811 4747 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 06:18:02 crc kubenswrapper[4747]: E1001 06:18:02.650940 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 06:19:06.650912307 +0000 UTC m=+148.060569376 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 01 06:18:02 crc kubenswrapper[4747]: E1001 06:18:02.650936 4747 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 06:18:02 crc kubenswrapper[4747]: E1001 06:18:02.651060 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-01 06:19:06.65102517 +0000 UTC m=+148.060682259 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 01 06:18:02 crc kubenswrapper[4747]: E1001 06:18:02.650838 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 06:18:02 crc kubenswrapper[4747]: E1001 06:18:02.651166 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 06:18:02 crc kubenswrapper[4747]: E1001 06:18:02.651204 4747 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:18:02 crc kubenswrapper[4747]: E1001 06:18:02.650946 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 01 06:18:02 crc kubenswrapper[4747]: E1001 06:18:02.651257 4747 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 01 06:18:02 crc kubenswrapper[4747]: E1001 06:18:02.651286 4747 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:18:02 crc kubenswrapper[4747]: E1001 06:18:02.651337 4747 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-01 06:19:06.651302158 +0000 UTC m=+148.060959247 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:18:02 crc kubenswrapper[4747]: E1001 06:18:02.651380 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-01 06:19:06.651354739 +0000 UTC m=+148.061011828 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.698374 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.698457 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.698469 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.698490 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.698501 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:02Z","lastTransitionTime":"2025-10-01T06:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.801741 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.801802 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.801811 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.801825 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.801834 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:02Z","lastTransitionTime":"2025-10-01T06:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.904793 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.904835 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.904843 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.904857 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:02 crc kubenswrapper[4747]: I1001 06:18:02.904867 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:02Z","lastTransitionTime":"2025-10-01T06:18:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.008102 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.008162 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.008183 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.008208 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.008228 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:03Z","lastTransitionTime":"2025-10-01T06:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.111627 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.111683 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.111697 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.111716 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.111730 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:03Z","lastTransitionTime":"2025-10-01T06:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.214296 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.214425 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.214445 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.214469 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.214486 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:03Z","lastTransitionTime":"2025-10-01T06:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.255969 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs\") pod \"network-metrics-daemon-4g26h\" (UID: \"d04a872f-a6a7-45d3-aa62-be934b7266c2\") " pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:03 crc kubenswrapper[4747]: E1001 06:18:03.256152 4747 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 06:18:03 crc kubenswrapper[4747]: E1001 06:18:03.256270 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs podName:d04a872f-a6a7-45d3-aa62-be934b7266c2 nodeName:}" failed. No retries permitted until 2025-10-01 06:19:07.25624359 +0000 UTC m=+148.665900679 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs") pod "network-metrics-daemon-4g26h" (UID: "d04a872f-a6a7-45d3-aa62-be934b7266c2") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.276276 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:03 crc kubenswrapper[4747]: E1001 06:18:03.276461 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.317189 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.317249 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.317266 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.317290 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.317306 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:03Z","lastTransitionTime":"2025-10-01T06:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.420248 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.420325 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.420343 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.420373 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.420391 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:03Z","lastTransitionTime":"2025-10-01T06:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.523260 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.523301 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.523309 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.523322 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.523330 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:03Z","lastTransitionTime":"2025-10-01T06:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.625399 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.625499 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.625527 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.625560 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.625583 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:03Z","lastTransitionTime":"2025-10-01T06:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.728630 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.728687 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.728704 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.728728 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.728746 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:03Z","lastTransitionTime":"2025-10-01T06:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.831834 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.831918 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.831946 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.831978 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.832003 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:03Z","lastTransitionTime":"2025-10-01T06:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.934062 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.934128 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.934145 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.934168 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:03 crc kubenswrapper[4747]: I1001 06:18:03.934184 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:03Z","lastTransitionTime":"2025-10-01T06:18:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.037277 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.037347 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.037370 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.037400 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.037421 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:04Z","lastTransitionTime":"2025-10-01T06:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.140875 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.140940 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.140961 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.140993 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.141016 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:04Z","lastTransitionTime":"2025-10-01T06:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.244293 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.244364 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.244386 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.244417 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.244439 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:04Z","lastTransitionTime":"2025-10-01T06:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.276102 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.276184 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:04 crc kubenswrapper[4747]: E1001 06:18:04.276291 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.276316 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:04 crc kubenswrapper[4747]: E1001 06:18:04.276500 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:04 crc kubenswrapper[4747]: E1001 06:18:04.276680 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.347865 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.347929 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.347950 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.347977 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.348000 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:04Z","lastTransitionTime":"2025-10-01T06:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.451518 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.451608 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.451626 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.451650 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.451667 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:04Z","lastTransitionTime":"2025-10-01T06:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.554875 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.554921 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.554932 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.554949 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.554962 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:04Z","lastTransitionTime":"2025-10-01T06:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.660916 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.660980 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.661000 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.661024 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.661040 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:04Z","lastTransitionTime":"2025-10-01T06:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.764798 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.764871 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.764886 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.764904 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.764917 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:04Z","lastTransitionTime":"2025-10-01T06:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.867923 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.867987 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.868004 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.868029 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.868047 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:04Z","lastTransitionTime":"2025-10-01T06:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.970562 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.970646 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.970670 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.970699 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:04 crc kubenswrapper[4747]: I1001 06:18:04.970717 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:04Z","lastTransitionTime":"2025-10-01T06:18:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.073833 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.073888 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.073904 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.073925 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.073942 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:05Z","lastTransitionTime":"2025-10-01T06:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.176956 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.177039 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.177053 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.177070 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.177082 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:05Z","lastTransitionTime":"2025-10-01T06:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.276830 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:05 crc kubenswrapper[4747]: E1001 06:18:05.277043 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.279841 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.279901 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.279926 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.280000 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.280028 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:05Z","lastTransitionTime":"2025-10-01T06:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.383055 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.383093 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.383102 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.383117 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.383127 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:05Z","lastTransitionTime":"2025-10-01T06:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.485916 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.485947 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.485955 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.485968 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.485977 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:05Z","lastTransitionTime":"2025-10-01T06:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.589036 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.589080 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.589093 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.589108 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.589120 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:05Z","lastTransitionTime":"2025-10-01T06:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.692202 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.692258 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.692275 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.692299 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.692317 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:05Z","lastTransitionTime":"2025-10-01T06:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.794668 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.794717 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.794734 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.794796 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.794814 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:05Z","lastTransitionTime":"2025-10-01T06:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.897974 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.898034 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.898052 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.898076 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:05 crc kubenswrapper[4747]: I1001 06:18:05.898094 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:05Z","lastTransitionTime":"2025-10-01T06:18:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.000799 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.000873 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.000894 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.000920 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.000938 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:06Z","lastTransitionTime":"2025-10-01T06:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.103430 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.103461 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.103469 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.103483 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.103491 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:06Z","lastTransitionTime":"2025-10-01T06:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.205608 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.205670 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.205680 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.205694 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.205703 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:06Z","lastTransitionTime":"2025-10-01T06:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.275694 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.275740 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:06 crc kubenswrapper[4747]: E1001 06:18:06.275850 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.275748 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:06 crc kubenswrapper[4747]: E1001 06:18:06.275976 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:06 crc kubenswrapper[4747]: E1001 06:18:06.276046 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.308366 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.308412 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.308422 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.308438 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.308453 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:06Z","lastTransitionTime":"2025-10-01T06:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.410928 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.411024 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.411074 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.411097 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.411114 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:06Z","lastTransitionTime":"2025-10-01T06:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.514368 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.514441 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.514469 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.514500 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.514522 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:06Z","lastTransitionTime":"2025-10-01T06:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.617722 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.617819 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.617834 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.617855 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.617867 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:06Z","lastTransitionTime":"2025-10-01T06:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.720802 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.720860 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.720875 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.720894 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.720909 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:06Z","lastTransitionTime":"2025-10-01T06:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.823634 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.823687 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.823695 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.823707 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.823717 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:06Z","lastTransitionTime":"2025-10-01T06:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.926707 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.926900 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.926922 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.926947 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:06 crc kubenswrapper[4747]: I1001 06:18:06.926963 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:06Z","lastTransitionTime":"2025-10-01T06:18:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.029878 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.029946 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.029968 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.029994 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.030012 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:07Z","lastTransitionTime":"2025-10-01T06:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.139111 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.139180 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.139207 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.139234 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.139252 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:07Z","lastTransitionTime":"2025-10-01T06:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.160218 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.160294 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.160314 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.160344 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.160366 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:07Z","lastTransitionTime":"2025-10-01T06:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:07 crc kubenswrapper[4747]: E1001 06:18:07.181537 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:07Z is after 
2025-08-24T17:21:41Z" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.186831 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.186876 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.186885 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.186903 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.186916 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:07Z","lastTransitionTime":"2025-10-01T06:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:07 crc kubenswrapper[4747]: E1001 06:18:07.203797 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:07Z is after 
2025-08-24T17:21:41Z" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.207913 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.208031 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.208099 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.208197 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.208256 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:07Z","lastTransitionTime":"2025-10-01T06:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:07 crc kubenswrapper[4747]: E1001 06:18:07.223474 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:07Z is after 
2025-08-24T17:21:41Z" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.228188 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.228251 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.228274 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.228301 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.228324 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:07Z","lastTransitionTime":"2025-10-01T06:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:07 crc kubenswrapper[4747]: E1001 06:18:07.241095 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:07Z is after 
2025-08-24T17:21:41Z" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.244470 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.244517 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.244538 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.244563 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.244583 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:07Z","lastTransitionTime":"2025-10-01T06:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:07 crc kubenswrapper[4747]: E1001 06:18:07.259073 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:07Z is after 
2025-08-24T17:21:41Z" Oct 01 06:18:07 crc kubenswrapper[4747]: E1001 06:18:07.259295 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.261140 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.261200 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.261224 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.261251 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.261273 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:07Z","lastTransitionTime":"2025-10-01T06:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.275944 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:07 crc kubenswrapper[4747]: E1001 06:18:07.276180 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.364031 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.364093 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.364110 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.364134 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.364151 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:07Z","lastTransitionTime":"2025-10-01T06:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.466923 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.467356 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.467551 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.467860 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.468069 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:07Z","lastTransitionTime":"2025-10-01T06:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.570271 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.570612 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.570824 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.571007 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.571133 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:07Z","lastTransitionTime":"2025-10-01T06:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.674635 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.674695 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.674714 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.674738 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.674792 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:07Z","lastTransitionTime":"2025-10-01T06:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.777438 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.777721 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.777841 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.777930 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.778020 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:07Z","lastTransitionTime":"2025-10-01T06:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.880604 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.880684 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.880707 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.880738 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.880796 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:07Z","lastTransitionTime":"2025-10-01T06:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.983315 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.983461 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.983485 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.983513 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:07 crc kubenswrapper[4747]: I1001 06:18:07.983534 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:07Z","lastTransitionTime":"2025-10-01T06:18:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.086424 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.086527 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.086559 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.086590 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.086615 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:08Z","lastTransitionTime":"2025-10-01T06:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.189835 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.189900 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.189918 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.189944 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.189960 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:08Z","lastTransitionTime":"2025-10-01T06:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.276443 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.276512 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.276563 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:08 crc kubenswrapper[4747]: E1001 06:18:08.277059 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:08 crc kubenswrapper[4747]: E1001 06:18:08.277182 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:08 crc kubenswrapper[4747]: E1001 06:18:08.277302 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.277559 4747 scope.go:117] "RemoveContainer" containerID="820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497" Oct 01 06:18:08 crc kubenswrapper[4747]: E1001 06:18:08.277876 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" podUID="38849139-d385-42a4-adab-687566065973" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.292894 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.293146 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.293339 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.293538 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.293730 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:08Z","lastTransitionTime":"2025-10-01T06:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.397888 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.397959 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.397979 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.398006 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.398026 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:08Z","lastTransitionTime":"2025-10-01T06:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.502054 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.502171 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.502189 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.502219 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.502237 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:08Z","lastTransitionTime":"2025-10-01T06:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.605474 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.605559 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.605576 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.605601 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.605618 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:08Z","lastTransitionTime":"2025-10-01T06:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.709656 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.709698 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.709707 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.709724 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.709733 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:08Z","lastTransitionTime":"2025-10-01T06:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.813444 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.813490 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.813499 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.813514 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.813524 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:08Z","lastTransitionTime":"2025-10-01T06:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.915956 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.916290 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.916423 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.916571 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:08 crc kubenswrapper[4747]: I1001 06:18:08.916713 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:08Z","lastTransitionTime":"2025-10-01T06:18:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.020114 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.020174 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.020193 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.020218 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.020239 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:09Z","lastTransitionTime":"2025-10-01T06:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.123430 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.123506 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.123530 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.123560 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.123586 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:09Z","lastTransitionTime":"2025-10-01T06:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.227327 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.228053 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.228144 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.228183 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.228211 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:09Z","lastTransitionTime":"2025-10-01T06:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.276398 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:09 crc kubenswrapper[4747]: E1001 06:18:09.276606 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.296096 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6dbf0278-afc0-4d69-9c4d-4430d499077e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d005a72a4508464e46cf86c9c347b040b94e66243125197b1ecfc851e9775c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2379c4c720c1c540235a19746449bd80db698055cee11b03937a739fdbcf7a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://429399ca37c5afa42bbe8695a8fa5de760ec79ec7584b764f0b886acce46e770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-
v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.314000 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.325467 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.332122 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.332195 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.332221 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.332250 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.332273 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:09Z","lastTransitionTime":"2025-10-01T06:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.347041 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ddd1dfd830b83669b9797e0821bee7b4e6f0f46b87b6e6c315085b8a0bcbea5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"2025-10-01T06:17:00+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9fcbd893-f91f-4aa6-a352-7190db3fe27a\\\\n2025-10-01T06:17:00+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_9fcbd893-f91f-4aa6-a352-7190db3fe27a to /host/opt/cni/bin/\\\\n2025-10-01T06:17:01Z [verbose] multus-daemon started\\\\n2025-10-01T06:17:01Z [verbose] Readiness Indicator file check\\\\n2025-10-01T06:17:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.367845 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.426986 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:54Z\\\",\\\"message\\\":\\\"df9-690dbab310cb}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 06:17:54.211592 6714 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-config-operator/metrics]} name:Service_openshift-config-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.161:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f32857b5-f652-4313-a0d7-455c3156dd99}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 06:17:54.211681 6714 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-config-operator/metrics]} name:Service_openshift-config-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.161:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f32857b5-f652-4313-a0d7-455c3156dd99}] Until: Durable:\\\\u003cnil\\\\u003e\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.438405 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.438465 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.438489 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.438522 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.438544 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:09Z","lastTransitionTime":"2025-10-01T06:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.455914 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00d8d6cd1ad3e44b5d6dba41765d85dad2c93af0cfda4d14693c7723915556c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f4855414dc7037b293dbb65a65021952e0445e00345cfe72abd2daf30f14e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lnhpd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.467915 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eb46590-a412-4269-9c1d-bca1fac316a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f5e8ba6a4048e7b3b7609ed4aa459e35520f9de4ed46ad68d1de6ad7c41746e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0e6c0f45945df898ef2838d574a0a1bad9dafb6a6608f336e1c92656d7018b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0e6c0f45945df898ef2838d574a0a1bad9dafb6a6608f336e1c92656d7018b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.487456 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.504575 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0
ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.516328 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.526219 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.536135 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.540469 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.540496 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.540506 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.540520 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.540528 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:09Z","lastTransitionTime":"2025-10-01T06:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.547855 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.563189 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.579137 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.595584 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.612889 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\
\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa8
7de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.627360 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:09Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.642835 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.642900 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.642924 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.642954 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.642976 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:09Z","lastTransitionTime":"2025-10-01T06:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.745841 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.745886 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.745902 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.745922 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.745938 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:09Z","lastTransitionTime":"2025-10-01T06:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.848137 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.848197 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.848217 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.848243 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.848262 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:09Z","lastTransitionTime":"2025-10-01T06:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.950768 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.950815 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.950826 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.950843 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:09 crc kubenswrapper[4747]: I1001 06:18:09.950854 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:09Z","lastTransitionTime":"2025-10-01T06:18:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.053864 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.054368 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.054519 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.054677 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.054868 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:10Z","lastTransitionTime":"2025-10-01T06:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.157537 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.157595 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.157611 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.157637 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.157654 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:10Z","lastTransitionTime":"2025-10-01T06:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.260891 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.261746 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.262002 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.262271 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.262480 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:10Z","lastTransitionTime":"2025-10-01T06:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.276390 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.276393 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.276516 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:10 crc kubenswrapper[4747]: E1001 06:18:10.276731 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:10 crc kubenswrapper[4747]: E1001 06:18:10.277165 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:10 crc kubenswrapper[4747]: E1001 06:18:10.277246 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.366138 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.366512 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.366545 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.366572 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.366592 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:10Z","lastTransitionTime":"2025-10-01T06:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.469399 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.469480 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.469503 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.469531 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.469549 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:10Z","lastTransitionTime":"2025-10-01T06:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.573082 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.573166 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.573185 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.573213 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.573232 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:10Z","lastTransitionTime":"2025-10-01T06:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.676148 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.676230 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.676245 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.676262 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.676277 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:10Z","lastTransitionTime":"2025-10-01T06:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.778795 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.778834 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.778843 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.778855 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.778864 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:10Z","lastTransitionTime":"2025-10-01T06:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.881746 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.881834 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.881845 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.881862 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.881874 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:10Z","lastTransitionTime":"2025-10-01T06:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.984318 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.984409 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.984430 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.984458 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:10 crc kubenswrapper[4747]: I1001 06:18:10.984484 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:10Z","lastTransitionTime":"2025-10-01T06:18:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.087416 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.087489 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.087508 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.087534 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.087554 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:11Z","lastTransitionTime":"2025-10-01T06:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.190802 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.190859 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.190925 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.190949 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.190963 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:11Z","lastTransitionTime":"2025-10-01T06:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.276001 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:11 crc kubenswrapper[4747]: E1001 06:18:11.276514 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.294409 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.294477 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.294492 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.294516 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.294532 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:11Z","lastTransitionTime":"2025-10-01T06:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.397840 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.398024 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.398046 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.398114 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.398133 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:11Z","lastTransitionTime":"2025-10-01T06:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.500688 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.500786 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.500806 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.500833 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.500850 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:11Z","lastTransitionTime":"2025-10-01T06:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.604436 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.604473 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.604484 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.604499 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.604511 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:11Z","lastTransitionTime":"2025-10-01T06:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.708182 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.708246 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.708266 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.708289 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.708307 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:11Z","lastTransitionTime":"2025-10-01T06:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.811151 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.811192 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.811202 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.811215 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.811225 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:11Z","lastTransitionTime":"2025-10-01T06:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.913661 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.913747 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.913832 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.913865 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:11 crc kubenswrapper[4747]: I1001 06:18:11.913890 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:11Z","lastTransitionTime":"2025-10-01T06:18:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.016787 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.016858 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.016882 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.016911 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.017003 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:12Z","lastTransitionTime":"2025-10-01T06:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.119529 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.119593 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.119619 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.119653 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.119677 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:12Z","lastTransitionTime":"2025-10-01T06:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.223495 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.223583 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.223602 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.223630 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.223648 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:12Z","lastTransitionTime":"2025-10-01T06:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.276394 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.276431 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:12 crc kubenswrapper[4747]: E1001 06:18:12.276576 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.276658 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:12 crc kubenswrapper[4747]: E1001 06:18:12.276686 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:12 crc kubenswrapper[4747]: E1001 06:18:12.276869 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.326656 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.326710 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.326727 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.326782 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.326801 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:12Z","lastTransitionTime":"2025-10-01T06:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.430226 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.430291 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.430313 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.430338 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.430357 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:12Z","lastTransitionTime":"2025-10-01T06:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.533175 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.533240 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.533257 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.533281 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.533297 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:12Z","lastTransitionTime":"2025-10-01T06:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.640252 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.640317 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.640329 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.640348 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.640366 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:12Z","lastTransitionTime":"2025-10-01T06:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.743331 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.743380 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.743392 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.743410 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.743421 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:12Z","lastTransitionTime":"2025-10-01T06:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.845727 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.845830 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.845849 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.845874 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.845892 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:12Z","lastTransitionTime":"2025-10-01T06:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.948499 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.948556 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.948573 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.948595 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:12 crc kubenswrapper[4747]: I1001 06:18:12.948612 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:12Z","lastTransitionTime":"2025-10-01T06:18:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.052244 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.052296 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.052305 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.052324 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.052335 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:13Z","lastTransitionTime":"2025-10-01T06:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.154807 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.154880 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.154901 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.154936 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.154985 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:13Z","lastTransitionTime":"2025-10-01T06:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.258741 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.258838 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.258861 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.258888 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.258907 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:13Z","lastTransitionTime":"2025-10-01T06:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.276779 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:13 crc kubenswrapper[4747]: E1001 06:18:13.277000 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.362271 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.362317 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.362329 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.362345 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.362358 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:13Z","lastTransitionTime":"2025-10-01T06:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.465634 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.465695 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.465711 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.465735 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.465788 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:13Z","lastTransitionTime":"2025-10-01T06:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.569135 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.569211 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.569238 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.569267 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.569292 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:13Z","lastTransitionTime":"2025-10-01T06:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.672852 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.672921 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.672943 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.672969 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.672990 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:13Z","lastTransitionTime":"2025-10-01T06:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.775817 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.775882 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.775899 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.775925 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.775942 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:13Z","lastTransitionTime":"2025-10-01T06:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.879791 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.879865 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.879893 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.879922 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.879944 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:13Z","lastTransitionTime":"2025-10-01T06:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.983073 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.983130 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.983147 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.983169 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:13 crc kubenswrapper[4747]: I1001 06:18:13.983186 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:13Z","lastTransitionTime":"2025-10-01T06:18:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.086894 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.086984 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.087002 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.087024 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.087041 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:14Z","lastTransitionTime":"2025-10-01T06:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.190710 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.190772 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.190787 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.190804 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.190815 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:14Z","lastTransitionTime":"2025-10-01T06:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.276035 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.276131 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:14 crc kubenswrapper[4747]: E1001 06:18:14.276398 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.276521 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:14 crc kubenswrapper[4747]: E1001 06:18:14.276557 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:14 crc kubenswrapper[4747]: E1001 06:18:14.278041 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.293833 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.293900 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.293919 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.293941 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.293958 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:14Z","lastTransitionTime":"2025-10-01T06:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.396679 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.396786 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.396861 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.396891 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.396915 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:14Z","lastTransitionTime":"2025-10-01T06:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.499993 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.500079 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.500090 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.500109 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.500119 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:14Z","lastTransitionTime":"2025-10-01T06:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.602777 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.602850 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.602867 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.602893 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.602910 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:14Z","lastTransitionTime":"2025-10-01T06:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.705733 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.705832 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.705849 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.705869 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.705884 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:14Z","lastTransitionTime":"2025-10-01T06:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.808506 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.808545 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.808556 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.808573 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.808586 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:14Z","lastTransitionTime":"2025-10-01T06:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.911713 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.911811 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.911832 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.911855 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:14 crc kubenswrapper[4747]: I1001 06:18:14.911876 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:14Z","lastTransitionTime":"2025-10-01T06:18:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.014596 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.014662 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.014674 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.014687 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.014698 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:15Z","lastTransitionTime":"2025-10-01T06:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.118236 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.118289 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.118308 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.118331 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.118347 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:15Z","lastTransitionTime":"2025-10-01T06:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.222226 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.222290 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.222307 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.222332 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.222350 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:15Z","lastTransitionTime":"2025-10-01T06:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.276355 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:15 crc kubenswrapper[4747]: E1001 06:18:15.276590 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.325698 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.325740 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.325767 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.325783 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.325792 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:15Z","lastTransitionTime":"2025-10-01T06:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.429003 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.429098 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.429116 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.429139 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.429156 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:15Z","lastTransitionTime":"2025-10-01T06:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.531737 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.531838 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.531862 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.531889 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.531910 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:15Z","lastTransitionTime":"2025-10-01T06:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.634911 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.635004 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.635030 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.635060 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.635084 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:15Z","lastTransitionTime":"2025-10-01T06:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.738013 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.738076 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.738113 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.738153 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.738175 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:15Z","lastTransitionTime":"2025-10-01T06:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.841522 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.841569 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.841586 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.841611 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.841629 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:15Z","lastTransitionTime":"2025-10-01T06:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.944655 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.944726 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.944799 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.944836 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:15 crc kubenswrapper[4747]: I1001 06:18:15.944855 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:15Z","lastTransitionTime":"2025-10-01T06:18:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.048594 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.048663 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.048681 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.048705 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.048723 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:16Z","lastTransitionTime":"2025-10-01T06:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.152280 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.152409 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.152435 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.152459 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.152476 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:16Z","lastTransitionTime":"2025-10-01T06:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.256388 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.256458 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.256476 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.256501 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.256518 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:16Z","lastTransitionTime":"2025-10-01T06:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.276165 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.276224 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.276234 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:16 crc kubenswrapper[4747]: E1001 06:18:16.276359 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:16 crc kubenswrapper[4747]: E1001 06:18:16.276526 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:16 crc kubenswrapper[4747]: E1001 06:18:16.276622 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.360232 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.360346 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.360364 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.360393 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.360411 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:16Z","lastTransitionTime":"2025-10-01T06:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.462946 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.463035 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.463064 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.463097 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.463122 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:16Z","lastTransitionTime":"2025-10-01T06:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.565666 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.565724 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.565742 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.565788 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.565808 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:16Z","lastTransitionTime":"2025-10-01T06:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.668437 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.668486 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.668505 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.668528 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.668547 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:16Z","lastTransitionTime":"2025-10-01T06:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.772250 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.772318 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.772336 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.772361 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.772424 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:16Z","lastTransitionTime":"2025-10-01T06:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.875487 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.875563 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.875580 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.875603 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.875620 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:16Z","lastTransitionTime":"2025-10-01T06:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.978227 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.978275 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.978284 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.978301 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:16 crc kubenswrapper[4747]: I1001 06:18:16.978311 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:16Z","lastTransitionTime":"2025-10-01T06:18:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.081667 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.081726 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.081748 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.081813 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.081833 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:17Z","lastTransitionTime":"2025-10-01T06:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.184840 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.184886 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.184903 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.184927 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.184945 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:17Z","lastTransitionTime":"2025-10-01T06:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.276224 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:17 crc kubenswrapper[4747]: E1001 06:18:17.276391 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.288073 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.288122 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.288140 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.288162 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.288179 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:17Z","lastTransitionTime":"2025-10-01T06:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.356288 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.356334 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.356350 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.356371 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.356388 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:17Z","lastTransitionTime":"2025-10-01T06:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:17 crc kubenswrapper[4747]: E1001 06:18:17.376847 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:17Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.382127 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.382174 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.382190 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.382214 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.382233 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:17Z","lastTransitionTime":"2025-10-01T06:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:17 crc kubenswrapper[4747]: E1001 06:18:17.404513 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:17Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.409481 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.409547 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.409572 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.409597 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.409621 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:17Z","lastTransitionTime":"2025-10-01T06:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:17 crc kubenswrapper[4747]: E1001 06:18:17.431916 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:17Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.437326 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.437393 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.437413 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.437441 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.437461 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:17Z","lastTransitionTime":"2025-10-01T06:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:17 crc kubenswrapper[4747]: E1001 06:18:17.459726 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:17Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.464410 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.464624 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.464875 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.465132 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.465312 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:17Z","lastTransitionTime":"2025-10-01T06:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:17 crc kubenswrapper[4747]: E1001 06:18:17.485054 4747 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-01T06:18:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f048b45a-6d3e-4f45-bc91-5a73d3968d47\\\",\\\"systemUUID\\\":\\\"486dc906-2211-4bcf-95f6-cf6c55ee481c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:17Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:17 crc kubenswrapper[4747]: E1001 06:18:17.485391 4747 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.488020 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.488068 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.488086 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.488109 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.488126 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:17Z","lastTransitionTime":"2025-10-01T06:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.590984 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.591050 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.591079 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.591109 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.591127 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:17Z","lastTransitionTime":"2025-10-01T06:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.694267 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.694336 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.694354 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.694382 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.694397 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:17Z","lastTransitionTime":"2025-10-01T06:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.797223 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.797575 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.797711 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.797918 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.798103 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:17Z","lastTransitionTime":"2025-10-01T06:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.901093 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.901156 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.901179 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.901211 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:17 crc kubenswrapper[4747]: I1001 06:18:17.901232 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:17Z","lastTransitionTime":"2025-10-01T06:18:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.004641 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.005051 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.005232 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.005386 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.005527 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:18Z","lastTransitionTime":"2025-10-01T06:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.108379 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.108437 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.108454 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.108476 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.108493 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:18Z","lastTransitionTime":"2025-10-01T06:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.211481 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.211543 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.211564 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.211591 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.211610 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:18Z","lastTransitionTime":"2025-10-01T06:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.276239 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.276325 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.276633 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:18 crc kubenswrapper[4747]: E1001 06:18:18.276814 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:18 crc kubenswrapper[4747]: E1001 06:18:18.276949 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:18 crc kubenswrapper[4747]: E1001 06:18:18.277070 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.315075 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.315152 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.315178 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.315210 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.315234 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:18Z","lastTransitionTime":"2025-10-01T06:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.417696 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.417859 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.417877 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.417901 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.417919 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:18Z","lastTransitionTime":"2025-10-01T06:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.520616 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.520675 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.520693 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.520715 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.520732 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:18Z","lastTransitionTime":"2025-10-01T06:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.624405 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.624470 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.624487 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.624511 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.624528 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:18Z","lastTransitionTime":"2025-10-01T06:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.727698 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.727829 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.727850 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.727958 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.727986 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:18Z","lastTransitionTime":"2025-10-01T06:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.830819 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.830877 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.830892 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.830909 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.830923 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:18Z","lastTransitionTime":"2025-10-01T06:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.933905 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.933967 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.933983 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.934009 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:18 crc kubenswrapper[4747]: I1001 06:18:18.934028 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:18Z","lastTransitionTime":"2025-10-01T06:18:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.036298 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.036350 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.036361 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.036381 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.036394 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:19Z","lastTransitionTime":"2025-10-01T06:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.138936 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.139014 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.139024 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.139043 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.139052 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:19Z","lastTransitionTime":"2025-10-01T06:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.241666 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.241733 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.241850 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.241883 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.241902 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:19Z","lastTransitionTime":"2025-10-01T06:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.277202 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.277286 4747 scope.go:117] "RemoveContainer" containerID="820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497" Oct 01 06:18:19 crc kubenswrapper[4747]: E1001 06:18:19.277446 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:19 crc kubenswrapper[4747]: E1001 06:18:19.277524 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" podUID="38849139-d385-42a4-adab-687566065973" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.297664 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e105764f6f9c301ba542d48aab8b68337fae6cb21aceb29733000d2e649589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://09b26aa5d45b09d4f9f18dd54ac96e6d339c9aeb0ad3cf81acf885f07c35193a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.321832 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38849139-d385-42a4-adab-687566065973\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\
":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerI
D\\\":\\\"cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:54Z\\\",\\\"message\\\":\\\"df9-690dbab310cb}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 06:17:54.211592 6714 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-config-operator/metrics]} name:Service_openshift-config-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.161:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f32857b5-f652-4313-a0d7-455c3156dd99}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1001 06:17:54.211681 6714 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-config-operator/metrics]} name:Service_openshift-config-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.161:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f32857b5-f652-4313-a0d7-455c3156dd99}] 
Until: Durable:\\\\u003cnil\\\\u003e\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:53Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\
\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2zjbg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-p48hw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.340455 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"669e0e5d-344c-47cb-87a7-1fa4ffd5e88b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00d8d6cd1ad3e44b5d6dba41765d85dad2c93af0cfda4d14693c7723915556c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://14f4855414dc7037b293dbb65a65021952e0445e00345cfe72abd2daf30f14e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hsxxq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:12Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-lnhpd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:19Z is after 2025-08-24T17:21:41Z" Oct 01 
06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.345519 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.345576 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.345597 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.345629 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.345648 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:19Z","lastTransitionTime":"2025-10-01T06:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.359218 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-4g26h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d04a872f-a6a7-45d3-aa62-be934b7266c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2jbwx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-4g26h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.374847 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0eb46590-a412-4269-9c1d-bca1fac316a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f5e8ba6a4048e7b3b7609ed4aa459e35520f9de4ed46ad68d1de6ad7c41746e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0e6c0f45945df898ef2838d574a0a1bad9dafb6a6608f336e1c92656d7018b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0e6c0f45945df898ef2838d574a0a1bad9dafb6a6608f336e1c92656d7018b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.397963 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7a6af4bc-0db2-4404-b0f2-19d0144950b5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c6753f260b8b0f1454a56c7855af103fc5babea7f4063d0162e4615cf2a5ec4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da6c0f3b8a43ccfde3b58c79b6ee014528978a611a167dc31a7cc571e7801f2b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d525dfb0d73d234edff2477030efa473b45d391c25efc9f15781e81c3c41af31\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://24d29cef16b584e0894198449a69233a2517f22e8aa7e00ddcb3e193c5d424bb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0deb3ff05cebd83c51e2c901f051b4f7f52750d7a0c7517dca12c37e7ba19e0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://47c478e7ed639b607bb7c9516aa2dd3b65d38bfe2265a63c635e8dd3a8f99687\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.443791 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"75825e9e-35d3-48b8-8be1-605ebbdcf522\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e9c0b4ae89a75b2192b1e17de0948e6783f936d770d83bf930b637c93c20ac1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6961c2b8a9c05bc9fabb24fb898c1bcc969caea8ea3a8891e52cc1e7db8cfb82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b43ea1df2c63303f3162043c94ed52c15611be736b2ca1ed8ff17de74edc81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df0f39144a820ee1c90a431b7915d6ab9b6ea0
ec26fc6c88a8f1b716e8f917a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4ec0d865a96732935be392a8d3ee6ae32543e179400bfff5b2a011a95afc9224\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe5e2438e6fa09bbe73feb08ce9d4074471753b22eba411aece1b89bf6482428\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93ba8805ea85284f0bed6e20648fac059c87a92eea6ae510df711666824866d6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee9b53a1b98c721be3e8c8a6693132d61d9ada10e015c918cf14fc6a2ea310da\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.448496 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.448618 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.448632 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.448648 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.448659 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:19Z","lastTransitionTime":"2025-10-01T06:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.467503 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4920acdeb9a782ec4a857b009715dfccd5d0e0031b9b1ed7c907d13007204ced\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.486918 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.502201 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-4zq78" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"67f55a21-76c4-4456-af5e-3f7f9c2b939d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f0bbc25d763e26a03072cd08dc9fb947ff7721af2b0c5357510884c09a73bf2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r2rzz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:17:01Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-4zq78\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.517830 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3ccae1da-a1ca-4792-a570-ab7ebd835b40\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://36b312f84d1a33eeb4daa77045ca0da02b85da4fcd825569c1f0aca35faf23b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9f014cdb29b46359a656b958c4385eaf27ca3caff71c5a21fa6cd114aa8818e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90b7acc15af2ac03023c9541a577e3f90790e397e9a6def25f03c89b985ce1b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506c
e0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ba5f9cc232f2ac6c77c4402aec3056f9f406670989b284cb4b80506dcfad65a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.536415 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.551425 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.551469 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.551479 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.551496 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.551506 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:19Z","lastTransitionTime":"2025-10-01T06:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.555157 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dd4c124b7bb4085bd15724ecaccd70902474b359588c6adfb6330eadffbaf189\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.572004 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90df9e29-7482-4ab7-84c6-f3029df17a0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21e366a771a5d329ca2f2fc8e1bb5838cc60ac835df1308e0bbedda5ade0e935\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mvtj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gh9dg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.596253 4747 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"57f2f957-14c5-4cef-81ae-b01b1693f15d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a297afdf0fb90a4b730c3d369da6fd764f4568958f754b75d10aaa467bd3947a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cdcb0b8d02b2f86fc0793e3274742f09b2c9cdcc35e8154bcbfafcee4e19c058\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1d42cde7692b266d930ac698b50d7fa0b7a8487ba54f448c946fc931bc2b06aa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://75ba8e4e2ec6cabb67559a0645044213175e10c673561292a95f78300087f5be\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:01Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4af4a86ef8a0540270890d7594d2064a4723c941fa87de401d9cb468fa016538\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4a9a6076d570cb1e3e378050d2b2d536dc1d9e2119de6473ab35a70f132a5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5168d17a15f12033bbcc48b8101cd4e840e10090effe9c49c1bfad509df6aecd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:17:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:17:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hml2x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-w5wxn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.615972 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6dbf0278-afc0-4d69-9c4d-4430d499077e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d005a72a4508464e46cf86c9c347b040b94e66243125197b1ecfc851e9775c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2379c4c720c1c540235a19746449bd80db698055cee11b03937a739fdbcf7a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://429399ca37c5afa42bbe8695a8fa5de760ec79ec7584b764f0b886acce46e770\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://30d98e8896116c699c76145e2d2f35c85e919da897461b59ddccd00b9e4c6146\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-01T06:16:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:40Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:39Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.633957 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.650691 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-mpx8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d323e332-ebe0-4a35-a811-f484557e7d7a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eccb47c611bf9d951ddd8198b2c91dfc8057d682ac7ae3e5d54bdd14939b2609\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ggrj5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:58Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-mpx8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.653391 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.653455 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.653481 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.653511 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.653535 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:19Z","lastTransitionTime":"2025-10-01T06:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.663968 4747 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pvmkj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2f2b780-a19d-4581-92f4-ca25c69a263c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:16:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-01T06:17:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9ddd1dfd830b83669b9797e0821bee7b4e6f0f46b87b6e6c315085b8a0bcbea5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-01T06:17:46Z\\\",\\\"message\\\":\\\"2025-10-01T06:17:00+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_9fcbd893-f91f-4aa6-a352-7190db3fe27a\\\\n2025-10-01T06:17:00+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_9fcbd893-f91f-4aa6-a352-7190db3fe27a to /host/opt/cni/bin/\\\\n2025-10-01T06:17:01Z [verbose] multus-daemon started\\\\n2025-10-01T06:17:01Z 
[verbose] Readiness Indicator file check\\\\n2025-10-01T06:17:46Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-01T06:16:59Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-01T06:17:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4p76x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-01T06:16:59Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pvmkj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-01T06:18:19Z is after 2025-08-24T17:21:41Z" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.755988 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.756025 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.756033 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.756046 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.756056 4747 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:19Z","lastTransitionTime":"2025-10-01T06:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.858008 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.858072 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.858093 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.858122 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.858143 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:19Z","lastTransitionTime":"2025-10-01T06:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.960644 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.960680 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.960688 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.960700 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:19 crc kubenswrapper[4747]: I1001 06:18:19.960708 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:19Z","lastTransitionTime":"2025-10-01T06:18:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.063677 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.063745 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.063793 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.063816 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.063833 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:20Z","lastTransitionTime":"2025-10-01T06:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.166057 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.166102 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.166115 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.166131 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.166142 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:20Z","lastTransitionTime":"2025-10-01T06:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.268278 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.268316 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.268325 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.268338 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.268347 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:20Z","lastTransitionTime":"2025-10-01T06:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.276473 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.276521 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:20 crc kubenswrapper[4747]: E1001 06:18:20.276597 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.276617 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:20 crc kubenswrapper[4747]: E1001 06:18:20.276816 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:20 crc kubenswrapper[4747]: E1001 06:18:20.276981 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.371353 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.371416 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.371434 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.371460 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.371479 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:20Z","lastTransitionTime":"2025-10-01T06:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.474074 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.474142 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.474160 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.474183 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.474204 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:20Z","lastTransitionTime":"2025-10-01T06:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.577575 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.577639 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.577656 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.577681 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.577698 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:20Z","lastTransitionTime":"2025-10-01T06:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.680617 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.680679 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.680695 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.680718 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.680735 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:20Z","lastTransitionTime":"2025-10-01T06:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.784122 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.784181 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.784197 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.784220 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.784236 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:20Z","lastTransitionTime":"2025-10-01T06:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.887534 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.887655 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.887678 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.887714 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.887734 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:20Z","lastTransitionTime":"2025-10-01T06:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.990720 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.990824 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.990842 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.990870 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:20 crc kubenswrapper[4747]: I1001 06:18:20.990889 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:20Z","lastTransitionTime":"2025-10-01T06:18:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.093959 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.094024 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.094042 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.094066 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.094085 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:21Z","lastTransitionTime":"2025-10-01T06:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.197022 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.197092 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.197110 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.197134 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.197156 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:21Z","lastTransitionTime":"2025-10-01T06:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.276441 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:21 crc kubenswrapper[4747]: E1001 06:18:21.276706 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.299895 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.299954 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.299971 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.299999 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.300021 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:21Z","lastTransitionTime":"2025-10-01T06:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.403362 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.403405 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.403415 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.403434 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.403448 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:21Z","lastTransitionTime":"2025-10-01T06:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.507014 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.507094 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.507115 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.507145 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.507169 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:21Z","lastTransitionTime":"2025-10-01T06:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.610148 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.610205 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.610221 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.610244 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.610263 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:21Z","lastTransitionTime":"2025-10-01T06:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.713353 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.713451 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.713477 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.713506 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.713530 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:21Z","lastTransitionTime":"2025-10-01T06:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.816900 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.816954 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.816971 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.816994 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.817022 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:21Z","lastTransitionTime":"2025-10-01T06:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.919215 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.919297 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.919319 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.919350 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:21 crc kubenswrapper[4747]: I1001 06:18:21.919366 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:21Z","lastTransitionTime":"2025-10-01T06:18:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.023350 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.023400 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.023417 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.023444 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.023461 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:22Z","lastTransitionTime":"2025-10-01T06:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.127122 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.127376 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.127415 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.127449 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.127475 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:22Z","lastTransitionTime":"2025-10-01T06:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.231664 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.231804 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.231828 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.231858 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.231881 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:22Z","lastTransitionTime":"2025-10-01T06:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.276254 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:22 crc kubenswrapper[4747]: E1001 06:18:22.276445 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.276730 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:22 crc kubenswrapper[4747]: E1001 06:18:22.276887 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.277025 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:22 crc kubenswrapper[4747]: E1001 06:18:22.277187 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.335282 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.335347 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.335364 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.335394 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.335412 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:22Z","lastTransitionTime":"2025-10-01T06:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.438453 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.438514 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.438530 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.438558 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.438576 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:22Z","lastTransitionTime":"2025-10-01T06:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.541600 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.541661 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.541679 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.541707 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.541725 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:22Z","lastTransitionTime":"2025-10-01T06:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.644327 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.644400 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.644412 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.644432 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.644444 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:22Z","lastTransitionTime":"2025-10-01T06:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.747192 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.747252 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.747272 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.747294 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.747311 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:22Z","lastTransitionTime":"2025-10-01T06:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.849959 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.850009 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.850018 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.850031 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.850041 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:22Z","lastTransitionTime":"2025-10-01T06:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.954324 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.954403 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.954428 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.954459 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:22 crc kubenswrapper[4747]: I1001 06:18:22.954481 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:22Z","lastTransitionTime":"2025-10-01T06:18:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.058015 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.058080 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.058097 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.058123 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.058140 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:23Z","lastTransitionTime":"2025-10-01T06:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.160063 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.160127 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.160147 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.160177 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.160194 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:23Z","lastTransitionTime":"2025-10-01T06:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.262990 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.263050 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.263065 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.263086 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.263102 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:23Z","lastTransitionTime":"2025-10-01T06:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.276343 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:23 crc kubenswrapper[4747]: E1001 06:18:23.276528 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.366299 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.366361 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.366384 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.366417 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.366440 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:23Z","lastTransitionTime":"2025-10-01T06:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.469635 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.469719 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.469788 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.469821 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.469843 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:23Z","lastTransitionTime":"2025-10-01T06:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.573157 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.573320 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.573347 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.573421 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.573446 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:23Z","lastTransitionTime":"2025-10-01T06:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.677170 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.677243 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.677266 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.677297 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.677329 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:23Z","lastTransitionTime":"2025-10-01T06:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.781067 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.781130 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.781151 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.781181 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.781203 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:23Z","lastTransitionTime":"2025-10-01T06:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.889330 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.889403 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.889423 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.889453 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.889495 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:23Z","lastTransitionTime":"2025-10-01T06:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.993426 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.993527 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.993547 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.993573 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:23 crc kubenswrapper[4747]: I1001 06:18:23.993591 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:23Z","lastTransitionTime":"2025-10-01T06:18:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.097663 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.097727 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.097747 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.097808 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.097826 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:24Z","lastTransitionTime":"2025-10-01T06:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.201149 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.201225 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.201249 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.201274 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.201292 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:24Z","lastTransitionTime":"2025-10-01T06:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.276741 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.276894 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.276926 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:24 crc kubenswrapper[4747]: E1001 06:18:24.277045 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:24 crc kubenswrapper[4747]: E1001 06:18:24.277508 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:24 crc kubenswrapper[4747]: E1001 06:18:24.277658 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.304392 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.304421 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.304428 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.304441 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.304451 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:24Z","lastTransitionTime":"2025-10-01T06:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.408051 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.408094 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.408106 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.408127 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.408140 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:24Z","lastTransitionTime":"2025-10-01T06:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.511947 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.512019 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.512043 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.512072 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.512095 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:24Z","lastTransitionTime":"2025-10-01T06:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.615393 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.615558 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.615573 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.615590 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.615602 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:24Z","lastTransitionTime":"2025-10-01T06:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.718497 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.718533 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.718544 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.718561 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.718573 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:24Z","lastTransitionTime":"2025-10-01T06:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.821191 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.821263 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.821288 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.821319 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.821345 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:24Z","lastTransitionTime":"2025-10-01T06:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.924292 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.924363 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.924386 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.924416 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:24 crc kubenswrapper[4747]: I1001 06:18:24.924441 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:24Z","lastTransitionTime":"2025-10-01T06:18:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.028136 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.028196 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.028222 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.028254 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.028278 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:25Z","lastTransitionTime":"2025-10-01T06:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.130967 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.131029 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.131051 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.131079 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.131098 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:25Z","lastTransitionTime":"2025-10-01T06:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.234412 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.234467 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.234484 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.234503 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.234516 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:25Z","lastTransitionTime":"2025-10-01T06:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.276179 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:25 crc kubenswrapper[4747]: E1001 06:18:25.276407 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.336867 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.336912 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.336922 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.336938 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.336947 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:25Z","lastTransitionTime":"2025-10-01T06:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.440165 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.440203 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.440213 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.440228 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.440237 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:25Z","lastTransitionTime":"2025-10-01T06:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.543285 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.543337 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.543351 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.543371 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.543386 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:25Z","lastTransitionTime":"2025-10-01T06:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.646307 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.646338 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.646348 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.646362 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.646372 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:25Z","lastTransitionTime":"2025-10-01T06:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.748644 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.748676 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.748684 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.748697 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.748706 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:25Z","lastTransitionTime":"2025-10-01T06:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.851525 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.851584 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.851594 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.851607 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.851618 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:25Z","lastTransitionTime":"2025-10-01T06:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.954486 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.954531 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.954543 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.954559 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:25 crc kubenswrapper[4747]: I1001 06:18:25.954570 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:25Z","lastTransitionTime":"2025-10-01T06:18:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.056569 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.056638 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.056655 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.056680 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.056698 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:26Z","lastTransitionTime":"2025-10-01T06:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.159529 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.159572 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.159583 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.159599 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.159611 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:26Z","lastTransitionTime":"2025-10-01T06:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.263141 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.263196 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.263209 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.263227 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.263240 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:26Z","lastTransitionTime":"2025-10-01T06:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.276376 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.276435 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.276478 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:26 crc kubenswrapper[4747]: E1001 06:18:26.276608 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:26 crc kubenswrapper[4747]: E1001 06:18:26.276731 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:26 crc kubenswrapper[4747]: E1001 06:18:26.276836 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.365499 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.365570 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.365579 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.365593 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.365603 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:26Z","lastTransitionTime":"2025-10-01T06:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.468193 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.468244 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.468255 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.468271 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.468284 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:26Z","lastTransitionTime":"2025-10-01T06:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.571441 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.571499 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.571515 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.571539 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.571557 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:26Z","lastTransitionTime":"2025-10-01T06:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.674010 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.674061 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.674077 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.674103 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.674120 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:26Z","lastTransitionTime":"2025-10-01T06:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.778999 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.779044 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.779068 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.779091 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.779109 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:26Z","lastTransitionTime":"2025-10-01T06:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.882248 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.882526 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.882630 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.882718 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.882832 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:26Z","lastTransitionTime":"2025-10-01T06:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.985809 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.986240 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.986435 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.986657 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:26 crc kubenswrapper[4747]: I1001 06:18:26.986887 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:26Z","lastTransitionTime":"2025-10-01T06:18:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.089886 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.090236 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.090439 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.090640 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.090833 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:27Z","lastTransitionTime":"2025-10-01T06:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.193733 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.193840 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.193911 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.193939 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.193997 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:27Z","lastTransitionTime":"2025-10-01T06:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.276423 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:27 crc kubenswrapper[4747]: E1001 06:18:27.276641 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.296471 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.296611 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.296636 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.296667 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.296689 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:27Z","lastTransitionTime":"2025-10-01T06:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.399796 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.399873 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.399896 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.399927 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.399952 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:27Z","lastTransitionTime":"2025-10-01T06:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.502726 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.502838 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.502863 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.502893 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.502914 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:27Z","lastTransitionTime":"2025-10-01T06:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.605947 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.605994 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.606006 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.606025 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.606040 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:27Z","lastTransitionTime":"2025-10-01T06:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.709638 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.709680 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.709691 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.709707 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.709718 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:27Z","lastTransitionTime":"2025-10-01T06:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.734647 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.734711 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.734729 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.734794 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.734822 4747 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-01T06:18:27Z","lastTransitionTime":"2025-10-01T06:18:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.795039 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-6r572"] Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.795580 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6r572" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.798634 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.798875 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.799074 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.799258 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.841154 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=60.841125853 podStartE2EDuration="1m0.841125853s" podCreationTimestamp="2025-10-01 06:17:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:18:27.818343272 +0000 UTC m=+109.228000361" watchObservedRunningTime="2025-10-01 06:18:27.841125853 +0000 UTC m=+109.250782932" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.856546 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-mpx8t" podStartSLOduration=89.856520266 podStartE2EDuration="1m29.856520266s" podCreationTimestamp="2025-10-01 06:16:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:18:27.856147927 +0000 UTC m=+109.265804986" watchObservedRunningTime="2025-10-01 06:18:27.856520266 +0000 UTC m=+109.266177355" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.875929 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-pvmkj" podStartSLOduration=89.87590614 podStartE2EDuration="1m29.87590614s" podCreationTimestamp="2025-10-01 06:16:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:18:27.875606603 +0000 UTC m=+109.285263662" watchObservedRunningTime="2025-10-01 06:18:27.87590614 +0000 UTC m=+109.285563219" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.939143 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/e11dc2e3-c444-46ca-be44-27af28f4d1d9-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-6r572\" (UID: \"e11dc2e3-c444-46ca-be44-27af28f4d1d9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6r572" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.939234 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/e11dc2e3-c444-46ca-be44-27af28f4d1d9-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-6r572\" (UID: \"e11dc2e3-c444-46ca-be44-27af28f4d1d9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6r572" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.939275 4747 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e11dc2e3-c444-46ca-be44-27af28f4d1d9-service-ca\") pod \"cluster-version-operator-5c965bbfc6-6r572\" (UID: \"e11dc2e3-c444-46ca-be44-27af28f4d1d9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6r572" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.939317 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e11dc2e3-c444-46ca-be44-27af28f4d1d9-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-6r572\" (UID: \"e11dc2e3-c444-46ca-be44-27af28f4d1d9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6r572" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.939350 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e11dc2e3-c444-46ca-be44-27af28f4d1d9-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-6r572\" (UID: \"e11dc2e3-c444-46ca-be44-27af28f4d1d9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6r572" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.940653 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-lnhpd" podStartSLOduration=87.940632942 podStartE2EDuration="1m27.940632942s" podCreationTimestamp="2025-10-01 06:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:18:27.940510848 +0000 UTC m=+109.350167897" watchObservedRunningTime="2025-10-01 06:18:27.940632942 +0000 UTC m=+109.350290021" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.951086 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=32.951051988 podStartE2EDuration="32.951051988s" podCreationTimestamp="2025-10-01 06:17:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:18:27.950347959 +0000 UTC m=+109.360005048" watchObservedRunningTime="2025-10-01 06:18:27.951051988 +0000 UTC m=+109.360709077" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.967884 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=89.967861966 podStartE2EDuration="1m29.967861966s" podCreationTimestamp="2025-10-01 06:16:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:18:27.967366604 +0000 UTC m=+109.377023693" watchObservedRunningTime="2025-10-01 06:18:27.967861966 +0000 UTC m=+109.377519045" Oct 01 06:18:27 crc kubenswrapper[4747]: I1001 06:18:27.995549 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=86.995534313 podStartE2EDuration="1m26.995534313s" podCreationTimestamp="2025-10-01 06:17:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:18:27.991190762 +0000 UTC m=+109.400847851" watchObservedRunningTime="2025-10-01 
06:18:27.995534313 +0000 UTC m=+109.405191362" Oct 01 06:18:28 crc kubenswrapper[4747]: I1001 06:18:28.041054 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e11dc2e3-c444-46ca-be44-27af28f4d1d9-service-ca\") pod \"cluster-version-operator-5c965bbfc6-6r572\" (UID: \"e11dc2e3-c444-46ca-be44-27af28f4d1d9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6r572" Oct 01 06:18:28 crc kubenswrapper[4747]: I1001 06:18:28.041440 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e11dc2e3-c444-46ca-be44-27af28f4d1d9-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-6r572\" (UID: \"e11dc2e3-c444-46ca-be44-27af28f4d1d9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6r572" Oct 01 06:18:28 crc kubenswrapper[4747]: I1001 06:18:28.041656 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e11dc2e3-c444-46ca-be44-27af28f4d1d9-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-6r572\" (UID: \"e11dc2e3-c444-46ca-be44-27af28f4d1d9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6r572" Oct 01 06:18:28 crc kubenswrapper[4747]: I1001 06:18:28.041941 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/e11dc2e3-c444-46ca-be44-27af28f4d1d9-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-6r572\" (UID: \"e11dc2e3-c444-46ca-be44-27af28f4d1d9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6r572" Oct 01 06:18:28 crc kubenswrapper[4747]: I1001 06:18:28.042186 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/e11dc2e3-c444-46ca-be44-27af28f4d1d9-service-ca\") pod \"cluster-version-operator-5c965bbfc6-6r572\" (UID: \"e11dc2e3-c444-46ca-be44-27af28f4d1d9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6r572" Oct 01 06:18:28 crc kubenswrapper[4747]: I1001 06:18:28.042044 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/e11dc2e3-c444-46ca-be44-27af28f4d1d9-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-6r572\" (UID: \"e11dc2e3-c444-46ca-be44-27af28f4d1d9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6r572" Oct 01 06:18:28 crc kubenswrapper[4747]: I1001 06:18:28.042207 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/e11dc2e3-c444-46ca-be44-27af28f4d1d9-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-6r572\" (UID: \"e11dc2e3-c444-46ca-be44-27af28f4d1d9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6r572" Oct 01 06:18:28 crc kubenswrapper[4747]: I1001 06:18:28.042433 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/e11dc2e3-c444-46ca-be44-27af28f4d1d9-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-6r572\" (UID: \"e11dc2e3-c444-46ca-be44-27af28f4d1d9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6r572" Oct 01 06:18:28 crc kubenswrapper[4747]: I1001 06:18:28.058903 4747 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e11dc2e3-c444-46ca-be44-27af28f4d1d9-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-6r572\" (UID: \"e11dc2e3-c444-46ca-be44-27af28f4d1d9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6r572" Oct 01 06:18:28 crc kubenswrapper[4747]: I1001 06:18:28.062704 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e11dc2e3-c444-46ca-be44-27af28f4d1d9-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-6r572\" (UID: \"e11dc2e3-c444-46ca-be44-27af28f4d1d9\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6r572" Oct 01 06:18:28 crc kubenswrapper[4747]: I1001 06:18:28.063416 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=83.063399593 podStartE2EDuration="1m23.063399593s" podCreationTimestamp="2025-10-01 06:17:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:18:28.062540602 +0000 UTC m=+109.472197701" watchObservedRunningTime="2025-10-01 06:18:28.063399593 +0000 UTC m=+109.473056652" Oct 01 06:18:28 crc kubenswrapper[4747]: I1001 06:18:28.108203 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podStartSLOduration=90.108173316 podStartE2EDuration="1m30.108173316s" podCreationTimestamp="2025-10-01 06:16:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:18:28.106617276 +0000 UTC m=+109.516274335" watchObservedRunningTime="2025-10-01 06:18:28.108173316 +0000 UTC m=+109.517830405" Oct 01 06:18:28 crc kubenswrapper[4747]: I1001 06:18:28.115992 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6r572" Oct 01 06:18:28 crc kubenswrapper[4747]: I1001 06:18:28.151476 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-w5wxn" podStartSLOduration=90.15145096 podStartE2EDuration="1m30.15145096s" podCreationTimestamp="2025-10-01 06:16:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:18:28.151119011 +0000 UTC m=+109.560776100" watchObservedRunningTime="2025-10-01 06:18:28.15145096 +0000 UTC m=+109.561108049" Oct 01 06:18:28 crc kubenswrapper[4747]: I1001 06:18:28.173541 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-4zq78" podStartSLOduration=89.173518223 podStartE2EDuration="1m29.173518223s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:18:28.171797499 +0000 UTC m=+109.581454588" watchObservedRunningTime="2025-10-01 06:18:28.173518223 +0000 UTC m=+109.583175282" Oct 01 06:18:28 crc kubenswrapper[4747]: I1001 06:18:28.275806 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:28 crc kubenswrapper[4747]: I1001 06:18:28.275918 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:28 crc kubenswrapper[4747]: E1001 06:18:28.275948 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:28 crc kubenswrapper[4747]: I1001 06:18:28.276040 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:28 crc kubenswrapper[4747]: E1001 06:18:28.276441 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:28 crc kubenswrapper[4747]: E1001 06:18:28.276572 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:28 crc kubenswrapper[4747]: I1001 06:18:28.932335 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6r572" event={"ID":"e11dc2e3-c444-46ca-be44-27af28f4d1d9","Type":"ContainerStarted","Data":"6cde732eab652408d00c1990db6dea5e25434475f804a01557308a7235a0697b"} Oct 01 06:18:28 crc kubenswrapper[4747]: I1001 06:18:28.932409 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6r572" event={"ID":"e11dc2e3-c444-46ca-be44-27af28f4d1d9","Type":"ContainerStarted","Data":"9a66349110e8b96f469a1cf88a921bed9486261ed99ffc0de1b8beaf900735f2"} Oct 01 06:18:29 crc kubenswrapper[4747]: I1001 06:18:29.275896 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:29 crc kubenswrapper[4747]: E1001 06:18:29.277640 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:30 crc kubenswrapper[4747]: I1001 06:18:30.275888 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:30 crc kubenswrapper[4747]: I1001 06:18:30.275924 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:30 crc kubenswrapper[4747]: I1001 06:18:30.276170 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:30 crc kubenswrapper[4747]: E1001 06:18:30.277140 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:30 crc kubenswrapper[4747]: E1001 06:18:30.277655 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:30 crc kubenswrapper[4747]: E1001 06:18:30.277873 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:31 crc kubenswrapper[4747]: I1001 06:18:31.276585 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:31 crc kubenswrapper[4747]: E1001 06:18:31.276815 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:32 crc kubenswrapper[4747]: I1001 06:18:32.276176 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:32 crc kubenswrapper[4747]: I1001 06:18:32.276196 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:32 crc kubenswrapper[4747]: I1001 06:18:32.276283 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:32 crc kubenswrapper[4747]: E1001 06:18:32.276794 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:32 crc kubenswrapper[4747]: E1001 06:18:32.276972 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:32 crc kubenswrapper[4747]: E1001 06:18:32.277064 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:32 crc kubenswrapper[4747]: I1001 06:18:32.277371 4747 scope.go:117] "RemoveContainer" containerID="820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497" Oct 01 06:18:32 crc kubenswrapper[4747]: E1001 06:18:32.277658 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-p48hw_openshift-ovn-kubernetes(38849139-d385-42a4-adab-687566065973)\"" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" podUID="38849139-d385-42a4-adab-687566065973" Oct 01 06:18:32 crc kubenswrapper[4747]: I1001 06:18:32.949080 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pvmkj_e2f2b780-a19d-4581-92f4-ca25c69a263c/kube-multus/1.log" Oct 01 06:18:32 crc kubenswrapper[4747]: I1001 06:18:32.949805 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pvmkj_e2f2b780-a19d-4581-92f4-ca25c69a263c/kube-multus/0.log" Oct 01 06:18:32 crc kubenswrapper[4747]: I1001 06:18:32.949877 4747 generic.go:334] "Generic (PLEG): container finished" podID="e2f2b780-a19d-4581-92f4-ca25c69a263c" containerID="9ddd1dfd830b83669b9797e0821bee7b4e6f0f46b87b6e6c315085b8a0bcbea5" exitCode=1 Oct 01 06:18:32 crc kubenswrapper[4747]: I1001 06:18:32.949925 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pvmkj" event={"ID":"e2f2b780-a19d-4581-92f4-ca25c69a263c","Type":"ContainerDied","Data":"9ddd1dfd830b83669b9797e0821bee7b4e6f0f46b87b6e6c315085b8a0bcbea5"} Oct 01 06:18:32 crc kubenswrapper[4747]: I1001 06:18:32.949974 4747 scope.go:117] "RemoveContainer" containerID="2d69a228042b89335216f9d43671bbc661f788bdd51e649f4e5b3d3bc1f61b8d" Oct 01 06:18:32 crc kubenswrapper[4747]: I1001 06:18:32.950690 4747 scope.go:117] "RemoveContainer" containerID="9ddd1dfd830b83669b9797e0821bee7b4e6f0f46b87b6e6c315085b8a0bcbea5" Oct 01 06:18:32 crc kubenswrapper[4747]: E1001 
06:18:32.951122 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-pvmkj_openshift-multus(e2f2b780-a19d-4581-92f4-ca25c69a263c)\"" pod="openshift-multus/multus-pvmkj" podUID="e2f2b780-a19d-4581-92f4-ca25c69a263c" Oct 01 06:18:32 crc kubenswrapper[4747]: I1001 06:18:32.976789 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-6r572" podStartSLOduration=93.97671431 podStartE2EDuration="1m33.97671431s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:18:28.955147742 +0000 UTC m=+110.364804811" watchObservedRunningTime="2025-10-01 06:18:32.97671431 +0000 UTC m=+114.386371419" Oct 01 06:18:33 crc kubenswrapper[4747]: I1001 06:18:33.275956 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:33 crc kubenswrapper[4747]: E1001 06:18:33.276212 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:33 crc kubenswrapper[4747]: I1001 06:18:33.955373 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pvmkj_e2f2b780-a19d-4581-92f4-ca25c69a263c/kube-multus/1.log" Oct 01 06:18:34 crc kubenswrapper[4747]: I1001 06:18:34.276115 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:34 crc kubenswrapper[4747]: I1001 06:18:34.276121 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:34 crc kubenswrapper[4747]: I1001 06:18:34.276135 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:34 crc kubenswrapper[4747]: E1001 06:18:34.276389 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:34 crc kubenswrapper[4747]: E1001 06:18:34.276848 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:34 crc kubenswrapper[4747]: E1001 06:18:34.276927 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:35 crc kubenswrapper[4747]: I1001 06:18:35.276281 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:35 crc kubenswrapper[4747]: E1001 06:18:35.276452 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:36 crc kubenswrapper[4747]: I1001 06:18:36.276865 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:36 crc kubenswrapper[4747]: I1001 06:18:36.276914 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:36 crc kubenswrapper[4747]: I1001 06:18:36.276878 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:36 crc kubenswrapper[4747]: E1001 06:18:36.277021 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:36 crc kubenswrapper[4747]: E1001 06:18:36.277135 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:36 crc kubenswrapper[4747]: E1001 06:18:36.277337 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:37 crc kubenswrapper[4747]: I1001 06:18:37.276332 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:37 crc kubenswrapper[4747]: E1001 06:18:37.276522 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:38 crc kubenswrapper[4747]: I1001 06:18:38.275977 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:38 crc kubenswrapper[4747]: I1001 06:18:38.276301 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:38 crc kubenswrapper[4747]: I1001 06:18:38.276355 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:38 crc kubenswrapper[4747]: E1001 06:18:38.276463 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:38 crc kubenswrapper[4747]: E1001 06:18:38.276602 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:38 crc kubenswrapper[4747]: E1001 06:18:38.276741 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:39 crc kubenswrapper[4747]: E1001 06:18:39.260350 4747 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Oct 01 06:18:39 crc kubenswrapper[4747]: I1001 06:18:39.276409 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:39 crc kubenswrapper[4747]: E1001 06:18:39.278272 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:39 crc kubenswrapper[4747]: E1001 06:18:39.413994 4747 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 01 06:18:40 crc kubenswrapper[4747]: I1001 06:18:40.276700 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:40 crc kubenswrapper[4747]: I1001 06:18:40.276820 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:40 crc kubenswrapper[4747]: I1001 06:18:40.276707 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:40 crc kubenswrapper[4747]: E1001 06:18:40.276955 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:40 crc kubenswrapper[4747]: E1001 06:18:40.277101 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:40 crc kubenswrapper[4747]: E1001 06:18:40.277222 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:41 crc kubenswrapper[4747]: I1001 06:18:41.276201 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:41 crc kubenswrapper[4747]: E1001 06:18:41.276953 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:42 crc kubenswrapper[4747]: I1001 06:18:42.276223 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:42 crc kubenswrapper[4747]: I1001 06:18:42.276314 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:42 crc kubenswrapper[4747]: E1001 06:18:42.276460 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:42 crc kubenswrapper[4747]: I1001 06:18:42.276275 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:42 crc kubenswrapper[4747]: E1001 06:18:42.276602 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:42 crc kubenswrapper[4747]: E1001 06:18:42.276708 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:43 crc kubenswrapper[4747]: I1001 06:18:43.276164 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:43 crc kubenswrapper[4747]: E1001 06:18:43.276363 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:44 crc kubenswrapper[4747]: I1001 06:18:44.275735 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:44 crc kubenswrapper[4747]: I1001 06:18:44.275854 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:44 crc kubenswrapper[4747]: E1001 06:18:44.275939 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:44 crc kubenswrapper[4747]: I1001 06:18:44.275854 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:44 crc kubenswrapper[4747]: E1001 06:18:44.276089 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:44 crc kubenswrapper[4747]: E1001 06:18:44.276141 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:44 crc kubenswrapper[4747]: E1001 06:18:44.415154 4747 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 01 06:18:45 crc kubenswrapper[4747]: I1001 06:18:45.276196 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:45 crc kubenswrapper[4747]: E1001 06:18:45.276424 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:46 crc kubenswrapper[4747]: I1001 06:18:46.276326 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:46 crc kubenswrapper[4747]: I1001 06:18:46.276405 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:46 crc kubenswrapper[4747]: E1001 06:18:46.276584 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:46 crc kubenswrapper[4747]: I1001 06:18:46.276399 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:46 crc kubenswrapper[4747]: E1001 06:18:46.276744 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:46 crc kubenswrapper[4747]: I1001 06:18:46.276851 4747 scope.go:117] "RemoveContainer" containerID="9ddd1dfd830b83669b9797e0821bee7b4e6f0f46b87b6e6c315085b8a0bcbea5" Oct 01 06:18:46 crc kubenswrapper[4747]: E1001 06:18:46.276971 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:47 crc kubenswrapper[4747]: I1001 06:18:47.003270 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pvmkj_e2f2b780-a19d-4581-92f4-ca25c69a263c/kube-multus/1.log" Oct 01 06:18:47 crc kubenswrapper[4747]: I1001 06:18:47.003665 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pvmkj" event={"ID":"e2f2b780-a19d-4581-92f4-ca25c69a263c","Type":"ContainerStarted","Data":"a3b36be96e97a5e64fec3b1e8acc8ceb3e2ab46403b34da703e1f97a3e702b3d"} Oct 01 06:18:47 crc kubenswrapper[4747]: I1001 06:18:47.276419 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:47 crc kubenswrapper[4747]: E1001 06:18:47.277121 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:47 crc kubenswrapper[4747]: I1001 06:18:47.277686 4747 scope.go:117] "RemoveContainer" containerID="820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497" Oct 01 06:18:48 crc kubenswrapper[4747]: I1001 06:18:48.009552 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p48hw_38849139-d385-42a4-adab-687566065973/ovnkube-controller/3.log" Oct 01 06:18:48 crc kubenswrapper[4747]: I1001 06:18:48.013682 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerStarted","Data":"802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0"} Oct 01 06:18:48 crc kubenswrapper[4747]: I1001 06:18:48.014319 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:18:48 crc kubenswrapper[4747]: I1001 06:18:48.052238 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" podStartSLOduration=110.052212912 podStartE2EDuration="1m50.052212912s" podCreationTimestamp="2025-10-01 06:16:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:18:48.051225988 +0000 UTC m=+129.460883067" watchObservedRunningTime="2025-10-01 06:18:48.052212912 +0000 UTC m=+129.461869991" Oct 01 06:18:48 crc kubenswrapper[4747]: I1001 06:18:48.173330 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-4g26h"] Oct 01 06:18:48 crc kubenswrapper[4747]: I1001 06:18:48.173466 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:48 crc kubenswrapper[4747]: E1001 06:18:48.173605 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:48 crc kubenswrapper[4747]: I1001 06:18:48.276311 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:48 crc kubenswrapper[4747]: I1001 06:18:48.276418 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:48 crc kubenswrapper[4747]: E1001 06:18:48.276450 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:48 crc kubenswrapper[4747]: E1001 06:18:48.276599 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:48 crc kubenswrapper[4747]: I1001 06:18:48.276710 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:48 crc kubenswrapper[4747]: E1001 06:18:48.276842 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:49 crc kubenswrapper[4747]: E1001 06:18:49.416389 4747 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 01 06:18:50 crc kubenswrapper[4747]: I1001 06:18:50.276371 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:50 crc kubenswrapper[4747]: I1001 06:18:50.276491 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:50 crc kubenswrapper[4747]: I1001 06:18:50.276505 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:50 crc kubenswrapper[4747]: E1001 06:18:50.276651 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:50 crc kubenswrapper[4747]: I1001 06:18:50.276928 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:50 crc kubenswrapper[4747]: E1001 06:18:50.277072 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:50 crc kubenswrapper[4747]: E1001 06:18:50.277167 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:50 crc kubenswrapper[4747]: E1001 06:18:50.277318 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:52 crc kubenswrapper[4747]: I1001 06:18:52.276168 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:52 crc kubenswrapper[4747]: I1001 06:18:52.276238 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:52 crc kubenswrapper[4747]: E1001 06:18:52.276417 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:52 crc kubenswrapper[4747]: I1001 06:18:52.276491 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:52 crc kubenswrapper[4747]: I1001 06:18:52.276491 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:52 crc kubenswrapper[4747]: E1001 06:18:52.276645 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:52 crc kubenswrapper[4747]: E1001 06:18:52.276732 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:52 crc kubenswrapper[4747]: E1001 06:18:52.276908 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:54 crc kubenswrapper[4747]: I1001 06:18:54.276114 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:54 crc kubenswrapper[4747]: I1001 06:18:54.276120 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:54 crc kubenswrapper[4747]: E1001 06:18:54.276360 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-4g26h" podUID="d04a872f-a6a7-45d3-aa62-be934b7266c2" Oct 01 06:18:54 crc kubenswrapper[4747]: I1001 06:18:54.276140 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:54 crc kubenswrapper[4747]: I1001 06:18:54.276120 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:54 crc kubenswrapper[4747]: E1001 06:18:54.276489 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 01 06:18:54 crc kubenswrapper[4747]: E1001 06:18:54.276578 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 01 06:18:54 crc kubenswrapper[4747]: E1001 06:18:54.276727 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 01 06:18:54 crc kubenswrapper[4747]: I1001 06:18:54.547249 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:18:56 crc kubenswrapper[4747]: I1001 06:18:56.275810 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:18:56 crc kubenswrapper[4747]: I1001 06:18:56.275918 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:18:56 crc kubenswrapper[4747]: I1001 06:18:56.276032 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:18:56 crc kubenswrapper[4747]: I1001 06:18:56.276087 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:18:56 crc kubenswrapper[4747]: I1001 06:18:56.278839 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 01 06:18:56 crc kubenswrapper[4747]: I1001 06:18:56.278890 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 01 06:18:56 crc kubenswrapper[4747]: I1001 06:18:56.278900 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 01 06:18:56 crc kubenswrapper[4747]: I1001 06:18:56.279177 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 01 06:18:56 crc kubenswrapper[4747]: I1001 06:18:56.281043 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 01 06:18:56 crc kubenswrapper[4747]: I1001 06:18:56.281494 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.638528 4747 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.681502 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6v524"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.682256 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6v524" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.683639 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-csw7c"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.684289 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.688773 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.689924 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.694782 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.694907 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.694980 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.695947 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.696147 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-nsd9h"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.696716 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.696959 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.697421 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.697636 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.701248 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.701704 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.702199 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-nsd9h" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.719688 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8jxsb"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.724152 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.724579 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.725455 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pghqr"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.726287 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pghqr" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.726372 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z4bz4"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.726679 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z4bz4" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.731340 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-q2zvk"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.732091 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.738594 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.739636 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.739898 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.740015 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.740129 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.740166 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.740265 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.740414 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.740676 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.741147 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.741418 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.741658 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.741849 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.742027 4747 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication"/"v4-0-config-system-cliconfig" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.742214 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.742319 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-bxlnn"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.742885 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-bxlnn" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.743295 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qnv2z"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.744038 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.745907 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.746194 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-vh9mz"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.747543 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.747871 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.748070 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.747870 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.748528 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.749004 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.749290 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.749516 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.750273 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.750371 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-vh9mz" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.750283 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.751126 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-bp7w9"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.751234 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.751311 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.751553 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.751658 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.751713 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bp7w9" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.752028 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.752566 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.752638 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.753282 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.753693 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.754895 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8mc8d"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.755510 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.755600 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-86zb4"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.756262 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-86zb4" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.761934 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.761981 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.762066 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.762119 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.762190 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.762277 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.762300 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.762393 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.762440 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.762526 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.762582 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.762637 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.762675 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.762731 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.762640 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.762989 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.763024 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.770729 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.774189 4747 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-wvm4n"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.774555 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.774811 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.775355 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.789467 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-wk2wx"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.790092 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ldwjw"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.790272 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wvm4n" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.790448 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.803068 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-pjjgz"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.803646 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pjjgz" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.803818 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-kcszh"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.804259 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ldwjw" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.804424 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-kcszh" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.804984 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.805286 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.805950 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9wgz7"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.806342 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9wgz7" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.806517 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e63497a4-0ba8-48a5-88af-dc20ef283130-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-z4bz4\" (UID: \"e63497a4-0ba8-48a5-88af-dc20ef283130\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z4bz4" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.806551 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.806573 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-client-ca\") pod \"controller-manager-879f6c89f-8jxsb\" (UID: \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.806589 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-serving-cert\") pod \"controller-manager-879f6c89f-8jxsb\" (UID: \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.806616 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.806632 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.806650 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.806674 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/98892cff-c8b2-48af-aa53-a3cc727ecf46-samples-operator-tls\") 
pod \"cluster-samples-operator-665b6dd947-6v524\" (UID: \"98892cff-c8b2-48af-aa53-a3cc727ecf46\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6v524" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.806690 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-8jxsb\" (UID: \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.806705 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2702589d-bd8c-4401-a5d9-2d57c88f33f6-service-ca\") pod \"console-f9d7485db-csw7c\" (UID: \"2702589d-bd8c-4401-a5d9-2d57c88f33f6\") " pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.806712 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.806722 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa4e6638-81d2-47ff-8bea-beb731e7a905-service-ca-bundle\") pod \"authentication-operator-69f744f599-nsd9h\" (UID: \"aa4e6638-81d2-47ff-8bea-beb731e7a905\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nsd9h" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810340 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e63497a4-0ba8-48a5-88af-dc20ef283130-config\") pod \"openshift-apiserver-operator-796bbdcf4f-z4bz4\" (UID: \"e63497a4-0ba8-48a5-88af-dc20ef283130\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z4bz4" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810367 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lttk4\" (UniqueName: \"kubernetes.io/projected/aa4e6638-81d2-47ff-8bea-beb731e7a905-kube-api-access-lttk4\") pod \"authentication-operator-69f744f599-nsd9h\" (UID: \"aa4e6638-81d2-47ff-8bea-beb731e7a905\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nsd9h" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810388 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810409 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 
06:18:58.810425 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/b55da778-ff99-4064-a60e-ee50f4f4f8e6-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-pghqr\" (UID: \"b55da778-ff99-4064-a60e-ee50f4f4f8e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pghqr" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810444 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmsnl\" (UniqueName: \"kubernetes.io/projected/b55da778-ff99-4064-a60e-ee50f4f4f8e6-kube-api-access-mmsnl\") pod \"cluster-image-registry-operator-dc59b4c8b-pghqr\" (UID: \"b55da778-ff99-4064-a60e-ee50f4f4f8e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pghqr" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810481 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2702589d-bd8c-4401-a5d9-2d57c88f33f6-oauth-serving-cert\") pod \"console-f9d7485db-csw7c\" (UID: \"2702589d-bd8c-4401-a5d9-2d57c88f33f6\") " pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810499 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b55da778-ff99-4064-a60e-ee50f4f4f8e6-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-pghqr\" (UID: \"b55da778-ff99-4064-a60e-ee50f4f4f8e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pghqr" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810522 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6dwk\" (UniqueName: \"kubernetes.io/projected/98892cff-c8b2-48af-aa53-a3cc727ecf46-kube-api-access-r6dwk\") pod \"cluster-samples-operator-665b6dd947-6v524\" (UID: \"98892cff-c8b2-48af-aa53-a3cc727ecf46\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6v524" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810540 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810574 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2702589d-bd8c-4401-a5d9-2d57c88f33f6-trusted-ca-bundle\") pod \"console-f9d7485db-csw7c\" (UID: \"2702589d-bd8c-4401-a5d9-2d57c88f33f6\") " pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810592 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa4e6638-81d2-47ff-8bea-beb731e7a905-config\") pod \"authentication-operator-69f744f599-nsd9h\" (UID: \"aa4e6638-81d2-47ff-8bea-beb731e7a905\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-nsd9h" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.809638 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810617 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9b9nt\" (UniqueName: \"kubernetes.io/projected/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-kube-api-access-9b9nt\") pod \"controller-manager-879f6c89f-8jxsb\" (UID: \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810635 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810650 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810671 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b35796dc-d78b-48ec-be7b-45d072b20fe8-audit-dir\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810696 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b55da778-ff99-4064-a60e-ee50f4f4f8e6-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-pghqr\" (UID: \"b55da778-ff99-4064-a60e-ee50f4f4f8e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pghqr" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810712 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa4e6638-81d2-47ff-8bea-beb731e7a905-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-nsd9h\" (UID: \"aa4e6638-81d2-47ff-8bea-beb731e7a905\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nsd9h" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.809659 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810764 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-audit-policies\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810783 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2702589d-bd8c-4401-a5d9-2d57c88f33f6-console-serving-cert\") pod \"console-f9d7485db-csw7c\" (UID: \"2702589d-bd8c-4401-a5d9-2d57c88f33f6\") " pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810799 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2702589d-bd8c-4401-a5d9-2d57c88f33f6-console-config\") pod \"console-f9d7485db-csw7c\" (UID: \"2702589d-bd8c-4401-a5d9-2d57c88f33f6\") " pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.809719 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810816 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whl8f\" (UniqueName: \"kubernetes.io/projected/2702589d-bd8c-4401-a5d9-2d57c88f33f6-kube-api-access-whl8f\") pod \"console-f9d7485db-csw7c\" (UID: \"2702589d-bd8c-4401-a5d9-2d57c88f33f6\") " pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810832 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-config\") pod \"controller-manager-879f6c89f-8jxsb\" (UID: \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810853 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2702589d-bd8c-4401-a5d9-2d57c88f33f6-console-oauth-config\") pod \"console-f9d7485db-csw7c\" (UID: \"2702589d-bd8c-4401-a5d9-2d57c88f33f6\") " pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810869 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa4e6638-81d2-47ff-8bea-beb731e7a905-serving-cert\") pod \"authentication-operator-69f744f599-nsd9h\" (UID: \"aa4e6638-81d2-47ff-8bea-beb731e7a905\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nsd9h" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810885 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810902 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-idp-0-file-data\") pod 
\"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810917 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hzpg\" (UniqueName: \"kubernetes.io/projected/e63497a4-0ba8-48a5-88af-dc20ef283130-kube-api-access-7hzpg\") pod \"openshift-apiserver-operator-796bbdcf4f-z4bz4\" (UID: \"e63497a4-0ba8-48a5-88af-dc20ef283130\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z4bz4" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.810932 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8r2th\" (UniqueName: \"kubernetes.io/projected/b35796dc-d78b-48ec-be7b-45d072b20fe8-kube-api-access-8r2th\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.809792 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.811607 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.811808 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.811823 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.813282 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.813353 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rtkg"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.815083 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.817901 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.820803 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-5nq8g"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.821220 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.821614 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.821735 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.821854 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 01 
06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.821945 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.822327 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.822508 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.822572 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-xccrp"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.823015 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2ct5k"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.823294 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.823619 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-mmmb9"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.824160 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mmmb9" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.824380 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rtkg" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.824556 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-5nq8g" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.824729 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xccrp" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.824951 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2ct5k" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.825048 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.825109 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.825373 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.825500 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.825659 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.825798 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.825863 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.825996 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.826118 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.826127 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.826209 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.826252 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.826479 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.826572 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.826629 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.826665 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.826781 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.827685 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.828244 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.832660 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6v524"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.836207 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321655-jzmsw"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.836649 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-jzmsw" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.839206 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-284zz"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.840083 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-284zz" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.847685 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.848266 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-kxgrb"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.855366 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-zgvwc"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.855591 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-kxgrb" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.856033 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-zgvwc" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.857392 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w2ctv"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.858520 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w2ctv" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.860422 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q6jpz"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.862646 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q6jpz" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.863497 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.865431 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.867302 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-jrxnk"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.872302 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-jrxnk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.873199 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6l64z"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.873879 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-9tnsf"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.874153 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6l64z" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.874506 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-9tnsf" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.874793 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-4kxcx"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.875532 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-4kxcx" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.875904 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4zldb"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.876887 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4zldb" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.877261 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8jxsb"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.878423 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-csw7c"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.879475 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z4bz4"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.879688 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.880365 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-fs4jt"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.881264 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-fs4jt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.882278 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-q2zvk"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.884209 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-zgvwc"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.885312 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ldwjw"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.889379 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.893565 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-pjjgz"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.894468 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pghqr"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.897715 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-86zb4"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.900130 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321655-jzmsw"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.905553 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.909716 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-nsd9h"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.910838 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.911520 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-serving-cert\") pod \"controller-manager-879f6c89f-8jxsb\" (UID: \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.911553 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-client-ca\") pod \"controller-manager-879f6c89f-8jxsb\" (UID: \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.911596 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.911619 4747 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0b633eab-bed0-436d-ad6d-bd7f315dc172-metrics-certs\") pod \"router-default-5444994796-5nq8g\" (UID: \"0b633eab-bed0-436d-ad6d-bd7f315dc172\") " pod="openshift-ingress/router-default-5444994796-5nq8g" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.911635 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.911670 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.911687 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfhmd\" (UniqueName: \"kubernetes.io/projected/0b633eab-bed0-436d-ad6d-bd7f315dc172-kube-api-access-jfhmd\") pod \"router-default-5444994796-5nq8g\" (UID: \"0b633eab-bed0-436d-ad6d-bd7f315dc172\") " pod="openshift-ingress/router-default-5444994796-5nq8g" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.911710 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/98892cff-c8b2-48af-aa53-a3cc727ecf46-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-6v524\" (UID: \"98892cff-c8b2-48af-aa53-a3cc727ecf46\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6v524" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.911725 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/0b633eab-bed0-436d-ad6d-bd7f315dc172-default-certificate\") pod \"router-default-5444994796-5nq8g\" (UID: \"0b633eab-bed0-436d-ad6d-bd7f315dc172\") " pod="openshift-ingress/router-default-5444994796-5nq8g" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.911843 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-284zz"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.911963 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-8jxsb\" (UID: \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912012 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4w44\" (UniqueName: \"kubernetes.io/projected/d4baa33b-4274-46b2-83d4-8e80ad9542c8-kube-api-access-m4w44\") pod \"openshift-config-operator-7777fb866f-bp7w9\" (UID: 
\"d4baa33b-4274-46b2-83d4-8e80ad9542c8\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bp7w9" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912052 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e9b3bf47-d12e-437e-8b8b-15fda1a2be69-metrics-tls\") pod \"dns-operator-744455d44c-kcszh\" (UID: \"e9b3bf47-d12e-437e-8b8b-15fda1a2be69\") " pod="openshift-dns-operator/dns-operator-744455d44c-kcszh" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912073 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2702589d-bd8c-4401-a5d9-2d57c88f33f6-service-ca\") pod \"console-f9d7485db-csw7c\" (UID: \"2702589d-bd8c-4401-a5d9-2d57c88f33f6\") " pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912090 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ecd114c8-714d-447c-a9d6-49be4118f010-serving-cert\") pod \"etcd-operator-b45778765-wk2wx\" (UID: \"ecd114c8-714d-447c-a9d6-49be4118f010\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912109 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnh76\" (UniqueName: \"kubernetes.io/projected/5a7d70fb-39d4-4f9c-a40c-b321880d83a7-kube-api-access-dnh76\") pod \"catalog-operator-68c6474976-2ct5k\" (UID: \"5a7d70fb-39d4-4f9c-a40c-b321880d83a7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2ct5k" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912130 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa4e6638-81d2-47ff-8bea-beb731e7a905-service-ca-bundle\") pod \"authentication-operator-69f744f599-nsd9h\" (UID: \"aa4e6638-81d2-47ff-8bea-beb731e7a905\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nsd9h" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912147 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sl6wp\" (UniqueName: \"kubernetes.io/projected/ecd114c8-714d-447c-a9d6-49be4118f010-kube-api-access-sl6wp\") pod \"etcd-operator-b45778765-wk2wx\" (UID: \"ecd114c8-714d-447c-a9d6-49be4118f010\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912168 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/b55da778-ff99-4064-a60e-ee50f4f4f8e6-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-pghqr\" (UID: \"b55da778-ff99-4064-a60e-ee50f4f4f8e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pghqr" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912191 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e63497a4-0ba8-48a5-88af-dc20ef283130-config\") pod \"openshift-apiserver-operator-796bbdcf4f-z4bz4\" (UID: \"e63497a4-0ba8-48a5-88af-dc20ef283130\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z4bz4" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912207 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lttk4\" (UniqueName: \"kubernetes.io/projected/aa4e6638-81d2-47ff-8bea-beb731e7a905-kube-api-access-lttk4\") pod \"authentication-operator-69f744f599-nsd9h\" (UID: \"aa4e6638-81d2-47ff-8bea-beb731e7a905\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nsd9h" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912222 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912239 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912259 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmsnl\" (UniqueName: \"kubernetes.io/projected/b55da778-ff99-4064-a60e-ee50f4f4f8e6-kube-api-access-mmsnl\") pod \"cluster-image-registry-operator-dc59b4c8b-pghqr\" (UID: \"b55da778-ff99-4064-a60e-ee50f4f4f8e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pghqr" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912278 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ecd114c8-714d-447c-a9d6-49be4118f010-etcd-client\") pod \"etcd-operator-b45778765-wk2wx\" (UID: \"ecd114c8-714d-447c-a9d6-49be4118f010\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912297 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk8wx\" (UniqueName: \"kubernetes.io/projected/e9b3bf47-d12e-437e-8b8b-15fda1a2be69-kube-api-access-lk8wx\") pod \"dns-operator-744455d44c-kcszh\" (UID: \"e9b3bf47-d12e-437e-8b8b-15fda1a2be69\") " pod="openshift-dns-operator/dns-operator-744455d44c-kcszh" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912311 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5a7d70fb-39d4-4f9c-a40c-b321880d83a7-srv-cert\") pod \"catalog-operator-68c6474976-2ct5k\" (UID: \"5a7d70fb-39d4-4f9c-a40c-b321880d83a7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2ct5k" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912341 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2702589d-bd8c-4401-a5d9-2d57c88f33f6-oauth-serving-cert\") pod \"console-f9d7485db-csw7c\" (UID: \"2702589d-bd8c-4401-a5d9-2d57c88f33f6\") " pod="openshift-console/console-f9d7485db-csw7c" Oct 01 
06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912358 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d25ffb7a-458f-4122-a01a-33edd82267cd-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-9wgz7\" (UID: \"d25ffb7a-458f-4122-a01a-33edd82267cd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9wgz7" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912378 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6dwk\" (UniqueName: \"kubernetes.io/projected/98892cff-c8b2-48af-aa53-a3cc727ecf46-kube-api-access-r6dwk\") pod \"cluster-samples-operator-665b6dd947-6v524\" (UID: \"98892cff-c8b2-48af-aa53-a3cc727ecf46\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6v524" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912397 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b55da778-ff99-4064-a60e-ee50f4f4f8e6-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-pghqr\" (UID: \"b55da778-ff99-4064-a60e-ee50f4f4f8e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pghqr" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912413 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5a7d70fb-39d4-4f9c-a40c-b321880d83a7-profile-collector-cert\") pod \"catalog-operator-68c6474976-2ct5k\" (UID: \"5a7d70fb-39d4-4f9c-a40c-b321880d83a7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2ct5k" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912437 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912452 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecd114c8-714d-447c-a9d6-49be4118f010-config\") pod \"etcd-operator-b45778765-wk2wx\" (UID: \"ecd114c8-714d-447c-a9d6-49be4118f010\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912484 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2702589d-bd8c-4401-a5d9-2d57c88f33f6-trusted-ca-bundle\") pod \"console-f9d7485db-csw7c\" (UID: \"2702589d-bd8c-4401-a5d9-2d57c88f33f6\") " pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912503 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa4e6638-81d2-47ff-8bea-beb731e7a905-config\") pod \"authentication-operator-69f744f599-nsd9h\" (UID: \"aa4e6638-81d2-47ff-8bea-beb731e7a905\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nsd9h" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912524 4747 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9b9nt\" (UniqueName: \"kubernetes.io/projected/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-kube-api-access-9b9nt\") pod \"controller-manager-879f6c89f-8jxsb\" (UID: \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912559 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912573 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b35796dc-d78b-48ec-be7b-45d072b20fe8-audit-dir\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912590 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912607 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/ecd114c8-714d-447c-a9d6-49be4118f010-etcd-service-ca\") pod \"etcd-operator-b45778765-wk2wx\" (UID: \"ecd114c8-714d-447c-a9d6-49be4118f010\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912628 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0b633eab-bed0-436d-ad6d-bd7f315dc172-service-ca-bundle\") pod \"router-default-5444994796-5nq8g\" (UID: \"0b633eab-bed0-436d-ad6d-bd7f315dc172\") " pod="openshift-ingress/router-default-5444994796-5nq8g" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912644 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/0b633eab-bed0-436d-ad6d-bd7f315dc172-stats-auth\") pod \"router-default-5444994796-5nq8g\" (UID: \"0b633eab-bed0-436d-ad6d-bd7f315dc172\") " pod="openshift-ingress/router-default-5444994796-5nq8g" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912661 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b55da778-ff99-4064-a60e-ee50f4f4f8e6-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-pghqr\" (UID: \"b55da778-ff99-4064-a60e-ee50f4f4f8e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pghqr" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912675 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/d25ffb7a-458f-4122-a01a-33edd82267cd-config\") pod \"kube-apiserver-operator-766d6c64bb-9wgz7\" (UID: \"d25ffb7a-458f-4122-a01a-33edd82267cd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9wgz7" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912701 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa4e6638-81d2-47ff-8bea-beb731e7a905-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-nsd9h\" (UID: \"aa4e6638-81d2-47ff-8bea-beb731e7a905\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nsd9h" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912717 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d4baa33b-4274-46b2-83d4-8e80ad9542c8-serving-cert\") pod \"openshift-config-operator-7777fb866f-bp7w9\" (UID: \"d4baa33b-4274-46b2-83d4-8e80ad9542c8\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bp7w9" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912735 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-config\") pod \"controller-manager-879f6c89f-8jxsb\" (UID: \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912779 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-audit-policies\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912798 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2702589d-bd8c-4401-a5d9-2d57c88f33f6-console-serving-cert\") pod \"console-f9d7485db-csw7c\" (UID: \"2702589d-bd8c-4401-a5d9-2d57c88f33f6\") " pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.912815 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2702589d-bd8c-4401-a5d9-2d57c88f33f6-console-config\") pod \"console-f9d7485db-csw7c\" (UID: \"2702589d-bd8c-4401-a5d9-2d57c88f33f6\") " pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.913066 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whl8f\" (UniqueName: \"kubernetes.io/projected/2702589d-bd8c-4401-a5d9-2d57c88f33f6-kube-api-access-whl8f\") pod \"console-f9d7485db-csw7c\" (UID: \"2702589d-bd8c-4401-a5d9-2d57c88f33f6\") " pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.913088 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/d4baa33b-4274-46b2-83d4-8e80ad9542c8-available-featuregates\") pod \"openshift-config-operator-7777fb866f-bp7w9\" (UID: \"d4baa33b-4274-46b2-83d4-8e80ad9542c8\") " 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-bp7w9" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.913111 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/ecd114c8-714d-447c-a9d6-49be4118f010-etcd-ca\") pod \"etcd-operator-b45778765-wk2wx\" (UID: \"ecd114c8-714d-447c-a9d6-49be4118f010\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.913156 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2702589d-bd8c-4401-a5d9-2d57c88f33f6-console-oauth-config\") pod \"console-f9d7485db-csw7c\" (UID: \"2702589d-bd8c-4401-a5d9-2d57c88f33f6\") " pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.913174 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aa4e6638-81d2-47ff-8bea-beb731e7a905-serving-cert\") pod \"authentication-operator-69f744f599-nsd9h\" (UID: \"aa4e6638-81d2-47ff-8bea-beb731e7a905\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nsd9h" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.913190 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.913206 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.913222 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hzpg\" (UniqueName: \"kubernetes.io/projected/e63497a4-0ba8-48a5-88af-dc20ef283130-kube-api-access-7hzpg\") pod \"openshift-apiserver-operator-796bbdcf4f-z4bz4\" (UID: \"e63497a4-0ba8-48a5-88af-dc20ef283130\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z4bz4" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.913240 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8r2th\" (UniqueName: \"kubernetes.io/projected/b35796dc-d78b-48ec-be7b-45d072b20fe8-kube-api-access-8r2th\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.913229 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-8jxsb\" (UID: \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 
06:18:58.913262 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e63497a4-0ba8-48a5-88af-dc20ef283130-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-z4bz4\" (UID: \"e63497a4-0ba8-48a5-88af-dc20ef283130\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z4bz4" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.913282 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.913301 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d25ffb7a-458f-4122-a01a-33edd82267cd-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-9wgz7\" (UID: \"d25ffb7a-458f-4122-a01a-33edd82267cd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9wgz7" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.913218 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-client-ca\") pod \"controller-manager-879f6c89f-8jxsb\" (UID: \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.913241 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9wgz7"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.913965 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/2702589d-bd8c-4401-a5d9-2d57c88f33f6-oauth-serving-cert\") pod \"console-f9d7485db-csw7c\" (UID: \"2702589d-bd8c-4401-a5d9-2d57c88f33f6\") " pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.914100 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b35796dc-d78b-48ec-be7b-45d072b20fe8-audit-dir\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.914219 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8mc8d"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.914326 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e63497a4-0ba8-48a5-88af-dc20ef283130-config\") pod \"openshift-apiserver-operator-796bbdcf4f-z4bz4\" (UID: \"e63497a4-0ba8-48a5-88af-dc20ef283130\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z4bz4" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.914922 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/2702589d-bd8c-4401-a5d9-2d57c88f33f6-service-ca\") pod \"console-f9d7485db-csw7c\" (UID: 
\"2702589d-bd8c-4401-a5d9-2d57c88f33f6\") " pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.915086 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa4e6638-81d2-47ff-8bea-beb731e7a905-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-nsd9h\" (UID: \"aa4e6638-81d2-47ff-8bea-beb731e7a905\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nsd9h" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.915243 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2702589d-bd8c-4401-a5d9-2d57c88f33f6-trusted-ca-bundle\") pod \"console-f9d7485db-csw7c\" (UID: \"2702589d-bd8c-4401-a5d9-2d57c88f33f6\") " pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.915361 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b55da778-ff99-4064-a60e-ee50f4f4f8e6-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-pghqr\" (UID: \"b55da778-ff99-4064-a60e-ee50f4f4f8e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pghqr" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.915561 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa4e6638-81d2-47ff-8bea-beb731e7a905-config\") pod \"authentication-operator-69f744f599-nsd9h\" (UID: \"aa4e6638-81d2-47ff-8bea-beb731e7a905\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nsd9h" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.915642 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/2702589d-bd8c-4401-a5d9-2d57c88f33f6-console-config\") pod \"console-f9d7485db-csw7c\" (UID: \"2702589d-bd8c-4401-a5d9-2d57c88f33f6\") " pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.915846 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.916277 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aa4e6638-81d2-47ff-8bea-beb731e7a905-service-ca-bundle\") pod \"authentication-operator-69f744f599-nsd9h\" (UID: \"aa4e6638-81d2-47ff-8bea-beb731e7a905\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nsd9h" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.916529 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-audit-policies\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.917049 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.917379 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-bp7w9"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.917918 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-config\") pod \"controller-manager-879f6c89f-8jxsb\" (UID: \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.917928 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.918993 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/98892cff-c8b2-48af-aa53-a3cc727ecf46-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-6v524\" (UID: \"98892cff-c8b2-48af-aa53-a3cc727ecf46\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6v524" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.919796 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.920369 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.921002 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qnv2z"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.921888 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.922831 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e63497a4-0ba8-48a5-88af-dc20ef283130-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-z4bz4\" (UID: 
\"e63497a4-0ba8-48a5-88af-dc20ef283130\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z4bz4" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.925052 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.925321 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.930561 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/b55da778-ff99-4064-a60e-ee50f4f4f8e6-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-pghqr\" (UID: \"b55da778-ff99-4064-a60e-ee50f4f4f8e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pghqr" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.930632 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.930881 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.931057 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-serving-cert\") pod \"controller-manager-879f6c89f-8jxsb\" (UID: \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.931153 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.931196 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/2702589d-bd8c-4401-a5d9-2d57c88f33f6-console-oauth-config\") pod \"console-f9d7485db-csw7c\" (UID: \"2702589d-bd8c-4401-a5d9-2d57c88f33f6\") " pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.932244 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/aa4e6638-81d2-47ff-8bea-beb731e7a905-serving-cert\") pod \"authentication-operator-69f744f599-nsd9h\" (UID: \"aa4e6638-81d2-47ff-8bea-beb731e7a905\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nsd9h" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.933936 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/2702589d-bd8c-4401-a5d9-2d57c88f33f6-console-serving-cert\") pod \"console-f9d7485db-csw7c\" (UID: \"2702589d-bd8c-4401-a5d9-2d57c88f33f6\") " pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.933986 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-wk2wx"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.934143 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.935031 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-kxgrb"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.936575 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.937884 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-xlnlq"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.938554 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-xlnlq" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.939152 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-92b7k"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.940165 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-92b7k" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.940800 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-vh9mz"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.942202 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-xlnlq"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.943379 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rtkg"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.945594 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w2ctv"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.945924 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.946462 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.952405 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2ct5k"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.955643 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-kcszh"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.958330 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-mmmb9"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.959812 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-jrxnk"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.960678 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-bxlnn"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.961660 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q6jpz"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.962683 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-xccrp"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.963689 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4zldb"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.963879 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.964674 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-92b7k"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.971553 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6l64z"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.972574 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-4kxcx"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.973526 
4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-9tnsf"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.975540 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-48x76"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.976440 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-48x76"] Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.976515 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-48x76" Oct 01 06:18:58 crc kubenswrapper[4747]: I1001 06:18:58.984995 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.005600 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.014020 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0b633eab-bed0-436d-ad6d-bd7f315dc172-metrics-certs\") pod \"router-default-5444994796-5nq8g\" (UID: \"0b633eab-bed0-436d-ad6d-bd7f315dc172\") " pod="openshift-ingress/router-default-5444994796-5nq8g" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.014063 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfhmd\" (UniqueName: \"kubernetes.io/projected/0b633eab-bed0-436d-ad6d-bd7f315dc172-kube-api-access-jfhmd\") pod \"router-default-5444994796-5nq8g\" (UID: \"0b633eab-bed0-436d-ad6d-bd7f315dc172\") " pod="openshift-ingress/router-default-5444994796-5nq8g" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.014087 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/0b633eab-bed0-436d-ad6d-bd7f315dc172-default-certificate\") pod \"router-default-5444994796-5nq8g\" (UID: \"0b633eab-bed0-436d-ad6d-bd7f315dc172\") " pod="openshift-ingress/router-default-5444994796-5nq8g" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.014104 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4w44\" (UniqueName: \"kubernetes.io/projected/d4baa33b-4274-46b2-83d4-8e80ad9542c8-kube-api-access-m4w44\") pod \"openshift-config-operator-7777fb866f-bp7w9\" (UID: \"d4baa33b-4274-46b2-83d4-8e80ad9542c8\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bp7w9" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.014122 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ecd114c8-714d-447c-a9d6-49be4118f010-serving-cert\") pod \"etcd-operator-b45778765-wk2wx\" (UID: \"ecd114c8-714d-447c-a9d6-49be4118f010\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.014137 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e9b3bf47-d12e-437e-8b8b-15fda1a2be69-metrics-tls\") pod \"dns-operator-744455d44c-kcszh\" (UID: \"e9b3bf47-d12e-437e-8b8b-15fda1a2be69\") " pod="openshift-dns-operator/dns-operator-744455d44c-kcszh" Oct 01 06:18:59 crc 
kubenswrapper[4747]: I1001 06:18:59.014159 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnh76\" (UniqueName: \"kubernetes.io/projected/5a7d70fb-39d4-4f9c-a40c-b321880d83a7-kube-api-access-dnh76\") pod \"catalog-operator-68c6474976-2ct5k\" (UID: \"5a7d70fb-39d4-4f9c-a40c-b321880d83a7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2ct5k" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.014181 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sl6wp\" (UniqueName: \"kubernetes.io/projected/ecd114c8-714d-447c-a9d6-49be4118f010-kube-api-access-sl6wp\") pod \"etcd-operator-b45778765-wk2wx\" (UID: \"ecd114c8-714d-447c-a9d6-49be4118f010\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.014221 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ecd114c8-714d-447c-a9d6-49be4118f010-etcd-client\") pod \"etcd-operator-b45778765-wk2wx\" (UID: \"ecd114c8-714d-447c-a9d6-49be4118f010\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.014246 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk8wx\" (UniqueName: \"kubernetes.io/projected/e9b3bf47-d12e-437e-8b8b-15fda1a2be69-kube-api-access-lk8wx\") pod \"dns-operator-744455d44c-kcszh\" (UID: \"e9b3bf47-d12e-437e-8b8b-15fda1a2be69\") " pod="openshift-dns-operator/dns-operator-744455d44c-kcszh" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.014282 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d25ffb7a-458f-4122-a01a-33edd82267cd-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-9wgz7\" (UID: \"d25ffb7a-458f-4122-a01a-33edd82267cd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9wgz7" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.014306 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5a7d70fb-39d4-4f9c-a40c-b321880d83a7-srv-cert\") pod \"catalog-operator-68c6474976-2ct5k\" (UID: \"5a7d70fb-39d4-4f9c-a40c-b321880d83a7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2ct5k" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.014337 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5a7d70fb-39d4-4f9c-a40c-b321880d83a7-profile-collector-cert\") pod \"catalog-operator-68c6474976-2ct5k\" (UID: \"5a7d70fb-39d4-4f9c-a40c-b321880d83a7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2ct5k" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.014361 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecd114c8-714d-447c-a9d6-49be4118f010-config\") pod \"etcd-operator-b45778765-wk2wx\" (UID: \"ecd114c8-714d-447c-a9d6-49be4118f010\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.014417 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/ecd114c8-714d-447c-a9d6-49be4118f010-etcd-service-ca\") pod \"etcd-operator-b45778765-wk2wx\" (UID: \"ecd114c8-714d-447c-a9d6-49be4118f010\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.014438 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0b633eab-bed0-436d-ad6d-bd7f315dc172-service-ca-bundle\") pod \"router-default-5444994796-5nq8g\" (UID: \"0b633eab-bed0-436d-ad6d-bd7f315dc172\") " pod="openshift-ingress/router-default-5444994796-5nq8g" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.014461 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/0b633eab-bed0-436d-ad6d-bd7f315dc172-stats-auth\") pod \"router-default-5444994796-5nq8g\" (UID: \"0b633eab-bed0-436d-ad6d-bd7f315dc172\") " pod="openshift-ingress/router-default-5444994796-5nq8g" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.014499 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d25ffb7a-458f-4122-a01a-33edd82267cd-config\") pod \"kube-apiserver-operator-766d6c64bb-9wgz7\" (UID: \"d25ffb7a-458f-4122-a01a-33edd82267cd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9wgz7" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.014526 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d4baa33b-4274-46b2-83d4-8e80ad9542c8-serving-cert\") pod \"openshift-config-operator-7777fb866f-bp7w9\" (UID: \"d4baa33b-4274-46b2-83d4-8e80ad9542c8\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bp7w9" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.014560 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/d4baa33b-4274-46b2-83d4-8e80ad9542c8-available-featuregates\") pod \"openshift-config-operator-7777fb866f-bp7w9\" (UID: \"d4baa33b-4274-46b2-83d4-8e80ad9542c8\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bp7w9" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.014585 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/ecd114c8-714d-447c-a9d6-49be4118f010-etcd-ca\") pod \"etcd-operator-b45778765-wk2wx\" (UID: \"ecd114c8-714d-447c-a9d6-49be4118f010\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.014627 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d25ffb7a-458f-4122-a01a-33edd82267cd-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-9wgz7\" (UID: \"d25ffb7a-458f-4122-a01a-33edd82267cd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9wgz7" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.016967 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/e9b3bf47-d12e-437e-8b8b-15fda1a2be69-metrics-tls\") pod \"dns-operator-744455d44c-kcszh\" (UID: \"e9b3bf47-d12e-437e-8b8b-15fda1a2be69\") " pod="openshift-dns-operator/dns-operator-744455d44c-kcszh" Oct 01 06:18:59 crc 
kubenswrapper[4747]: I1001 06:18:59.016987 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/ecd114c8-714d-447c-a9d6-49be4118f010-etcd-client\") pod \"etcd-operator-b45778765-wk2wx\" (UID: \"ecd114c8-714d-447c-a9d6-49be4118f010\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.017306 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/d4baa33b-4274-46b2-83d4-8e80ad9542c8-available-featuregates\") pod \"openshift-config-operator-7777fb866f-bp7w9\" (UID: \"d4baa33b-4274-46b2-83d4-8e80ad9542c8\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bp7w9" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.017681 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/ecd114c8-714d-447c-a9d6-49be4118f010-etcd-ca\") pod \"etcd-operator-b45778765-wk2wx\" (UID: \"ecd114c8-714d-447c-a9d6-49be4118f010\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.017881 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ecd114c8-714d-447c-a9d6-49be4118f010-config\") pod \"etcd-operator-b45778765-wk2wx\" (UID: \"ecd114c8-714d-447c-a9d6-49be4118f010\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.017894 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d4baa33b-4274-46b2-83d4-8e80ad9542c8-serving-cert\") pod \"openshift-config-operator-7777fb866f-bp7w9\" (UID: \"d4baa33b-4274-46b2-83d4-8e80ad9542c8\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bp7w9" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.018016 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ecd114c8-714d-447c-a9d6-49be4118f010-serving-cert\") pod \"etcd-operator-b45778765-wk2wx\" (UID: \"ecd114c8-714d-447c-a9d6-49be4118f010\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.021932 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/ecd114c8-714d-447c-a9d6-49be4118f010-etcd-service-ca\") pod \"etcd-operator-b45778765-wk2wx\" (UID: \"ecd114c8-714d-447c-a9d6-49be4118f010\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.024321 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.030056 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d25ffb7a-458f-4122-a01a-33edd82267cd-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-9wgz7\" (UID: \"d25ffb7a-458f-4122-a01a-33edd82267cd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9wgz7" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.045019 4747 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.055486 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d25ffb7a-458f-4122-a01a-33edd82267cd-config\") pod \"kube-apiserver-operator-766d6c64bb-9wgz7\" (UID: \"d25ffb7a-458f-4122-a01a-33edd82267cd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9wgz7" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.065020 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.104032 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.125140 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.144235 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.165459 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.184425 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.204645 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.224870 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.245224 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.265906 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.285057 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.305234 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.324989 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.344354 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.348462 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/0b633eab-bed0-436d-ad6d-bd7f315dc172-default-certificate\") pod \"router-default-5444994796-5nq8g\" (UID: \"0b633eab-bed0-436d-ad6d-bd7f315dc172\") " 
pod="openshift-ingress/router-default-5444994796-5nq8g" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.364412 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.371520 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/0b633eab-bed0-436d-ad6d-bd7f315dc172-stats-auth\") pod \"router-default-5444994796-5nq8g\" (UID: \"0b633eab-bed0-436d-ad6d-bd7f315dc172\") " pod="openshift-ingress/router-default-5444994796-5nq8g" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.384334 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.389447 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0b633eab-bed0-436d-ad6d-bd7f315dc172-metrics-certs\") pod \"router-default-5444994796-5nq8g\" (UID: \"0b633eab-bed0-436d-ad6d-bd7f315dc172\") " pod="openshift-ingress/router-default-5444994796-5nq8g" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.404608 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.405971 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0b633eab-bed0-436d-ad6d-bd7f315dc172-service-ca-bundle\") pod \"router-default-5444994796-5nq8g\" (UID: \"0b633eab-bed0-436d-ad6d-bd7f315dc172\") " pod="openshift-ingress/router-default-5444994796-5nq8g" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.428135 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.445673 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.465538 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.485341 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.493842 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5a7d70fb-39d4-4f9c-a40c-b321880d83a7-srv-cert\") pod \"catalog-operator-68c6474976-2ct5k\" (UID: \"5a7d70fb-39d4-4f9c-a40c-b321880d83a7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2ct5k" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.505656 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.512860 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5a7d70fb-39d4-4f9c-a40c-b321880d83a7-profile-collector-cert\") pod \"catalog-operator-68c6474976-2ct5k\" (UID: \"5a7d70fb-39d4-4f9c-a40c-b321880d83a7\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2ct5k" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.525637 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.565094 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.584571 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.605786 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.625483 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.644461 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.671794 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.685206 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.705409 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.724607 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.745607 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.766019 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.785192 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.805654 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.825167 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.844742 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.863129 4747 request.go:700] Waited for 1.006788832s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-console-operator/secrets?fieldSelector=metadata.name%3Dserving-cert&limit=500&resourceVersion=0 Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.865707 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Oct 01 
06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.895524 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.905828 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.924934 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.945126 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.964987 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Oct 01 06:18:59 crc kubenswrapper[4747]: I1001 06:18:59.985426 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.005180 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.025116 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.045213 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.065502 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.085070 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.104997 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.125011 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.144213 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.165348 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.185265 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.205147 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.225129 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.245345 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Oct 01 06:19:00 crc 
kubenswrapper[4747]: I1001 06:19:00.264897 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.285667 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.304568 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.325238 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.345178 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.366022 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.385958 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.406064 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.425525 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.445471 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.465127 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.484927 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.505875 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.525586 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.572285 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b55da778-ff99-4064-a60e-ee50f4f4f8e6-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-pghqr\" (UID: \"b55da778-ff99-4064-a60e-ee50f4f4f8e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pghqr" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.593870 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6dwk\" (UniqueName: \"kubernetes.io/projected/98892cff-c8b2-48af-aa53-a3cc727ecf46-kube-api-access-r6dwk\") pod \"cluster-samples-operator-665b6dd947-6v524\" (UID: 
\"98892cff-c8b2-48af-aa53-a3cc727ecf46\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6v524" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.612787 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lttk4\" (UniqueName: \"kubernetes.io/projected/aa4e6638-81d2-47ff-8bea-beb731e7a905-kube-api-access-lttk4\") pod \"authentication-operator-69f744f599-nsd9h\" (UID: \"aa4e6638-81d2-47ff-8bea-beb731e7a905\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nsd9h" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.620420 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whl8f\" (UniqueName: \"kubernetes.io/projected/2702589d-bd8c-4401-a5d9-2d57c88f33f6-kube-api-access-whl8f\") pod \"console-f9d7485db-csw7c\" (UID: \"2702589d-bd8c-4401-a5d9-2d57c88f33f6\") " pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.642404 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmsnl\" (UniqueName: \"kubernetes.io/projected/b55da778-ff99-4064-a60e-ee50f4f4f8e6-kube-api-access-mmsnl\") pod \"cluster-image-registry-operator-dc59b4c8b-pghqr\" (UID: \"b55da778-ff99-4064-a60e-ee50f4f4f8e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pghqr" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.660724 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hzpg\" (UniqueName: \"kubernetes.io/projected/e63497a4-0ba8-48a5-88af-dc20ef283130-kube-api-access-7hzpg\") pod \"openshift-apiserver-operator-796bbdcf4f-z4bz4\" (UID: \"e63497a4-0ba8-48a5-88af-dc20ef283130\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z4bz4" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.690852 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8r2th\" (UniqueName: \"kubernetes.io/projected/b35796dc-d78b-48ec-be7b-45d072b20fe8-kube-api-access-8r2th\") pod \"oauth-openshift-558db77b4-q2zvk\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.704204 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.706525 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9b9nt\" (UniqueName: \"kubernetes.io/projected/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-kube-api-access-9b9nt\") pod \"controller-manager-879f6c89f-8jxsb\" (UID: \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.725160 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.745891 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.764951 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.786107 4747 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-dns"/"dns-default-metrics-tls" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.803660 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6v524" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.806142 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.826562 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.835944 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.843417 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-nsd9h" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.845436 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.857878 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.863611 4747 request.go:700] Waited for 1.886813325s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/hostpath-provisioner/secrets?fieldSelector=metadata.name%3Dcsi-hostpath-provisioner-sa-dockercfg-qd74k&limit=500&resourceVersion=0 Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.865268 4747 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.879641 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pghqr" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.885796 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.907129 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z4bz4" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.912662 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.932053 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfhmd\" (UniqueName: \"kubernetes.io/projected/0b633eab-bed0-436d-ad6d-bd7f315dc172-kube-api-access-jfhmd\") pod \"router-default-5444994796-5nq8g\" (UID: \"0b633eab-bed0-436d-ad6d-bd7f315dc172\") " pod="openshift-ingress/router-default-5444994796-5nq8g" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.949116 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sl6wp\" (UniqueName: \"kubernetes.io/projected/ecd114c8-714d-447c-a9d6-49be4118f010-kube-api-access-sl6wp\") pod \"etcd-operator-b45778765-wk2wx\" (UID: \"ecd114c8-714d-447c-a9d6-49be4118f010\") " pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.980634 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnh76\" (UniqueName: \"kubernetes.io/projected/5a7d70fb-39d4-4f9c-a40c-b321880d83a7-kube-api-access-dnh76\") pod \"catalog-operator-68c6474976-2ct5k\" (UID: \"5a7d70fb-39d4-4f9c-a40c-b321880d83a7\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2ct5k" Oct 01 06:19:00 crc kubenswrapper[4747]: I1001 06:19:00.996780 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk8wx\" (UniqueName: \"kubernetes.io/projected/e9b3bf47-d12e-437e-8b8b-15fda1a2be69-kube-api-access-lk8wx\") pod \"dns-operator-744455d44c-kcszh\" (UID: \"e9b3bf47-d12e-437e-8b8b-15fda1a2be69\") " pod="openshift-dns-operator/dns-operator-744455d44c-kcszh" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:00.999953 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4w44\" (UniqueName: \"kubernetes.io/projected/d4baa33b-4274-46b2-83d4-8e80ad9542c8-kube-api-access-m4w44\") pod \"openshift-config-operator-7777fb866f-bp7w9\" (UID: \"d4baa33b-4274-46b2-83d4-8e80ad9542c8\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-bp7w9" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.017344 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d25ffb7a-458f-4122-a01a-33edd82267cd-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-9wgz7\" (UID: \"d25ffb7a-458f-4122-a01a-33edd82267cd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9wgz7" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.047594 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5de5adaf-4595-446d-9b77-a48824db2dfa-registry-certificates\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.047899 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5de5adaf-4595-446d-9b77-a48824db2dfa-registry-tls\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.048932 
4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: E1001 06:19:01.049313 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:01.549293548 +0000 UTC m=+142.958950687 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.049363 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5de5adaf-4595-446d-9b77-a48824db2dfa-ca-trust-extracted\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.055031 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.081301 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-kcszh" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.101783 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6v524"] Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.102952 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-5nq8g" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.114414 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2ct5k" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.150957 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.151797 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jg6f\" (UniqueName: \"kubernetes.io/projected/5de5adaf-4595-446d-9b77-a48824db2dfa-kube-api-access-7jg6f\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.152167 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnxgz\" (UniqueName: \"kubernetes.io/projected/37ccee71-1ac4-49a3-bef1-74f2fe4babe7-kube-api-access-xnxgz\") pod \"packageserver-d55dfcdfc-689vg\" (UID: \"37ccee71-1ac4-49a3-bef1-74f2fe4babe7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.152195 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/37ccee71-1ac4-49a3-bef1-74f2fe4babe7-webhook-cert\") pod \"packageserver-d55dfcdfc-689vg\" (UID: \"37ccee71-1ac4-49a3-bef1-74f2fe4babe7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg" Oct 01 06:19:01 crc kubenswrapper[4747]: E1001 06:19:01.152220 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:01.652201768 +0000 UTC m=+143.061858817 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.152389 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5de5adaf-4595-446d-9b77-a48824db2dfa-trusted-ca\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.152454 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b491ab08-e1e7-4166-b2fc-3d265a06414f-config\") pod \"machine-api-operator-5694c8668f-vh9mz\" (UID: \"b491ab08-e1e7-4166-b2fc-3d265a06414f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vh9mz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.152474 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/37ccee71-1ac4-49a3-bef1-74f2fe4babe7-tmpfs\") pod \"packageserver-d55dfcdfc-689vg\" (UID: \"37ccee71-1ac4-49a3-bef1-74f2fe4babe7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.152540 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-audit-dir\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.152670 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67mqq\" (UniqueName: \"kubernetes.io/projected/07aacdeb-d996-4747-a1d8-4803d5f7f4a7-kube-api-access-67mqq\") pod \"migrator-59844c95c7-4kxcx\" (UID: \"07aacdeb-d996-4747-a1d8-4803d5f7f4a7\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-4kxcx" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.152931 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5de5adaf-4595-446d-9b77-a48824db2dfa-installation-pull-secrets\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.153073 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eaddcad0-cef4-4919-84c0-3edbc3b2bd6c-config\") pod \"machine-approver-56656f9798-wvm4n\" (UID: \"eaddcad0-cef4-4919-84c0-3edbc3b2bd6c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wvm4n" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.153271 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/75cbdf07-2be3-4b03-9241-e7e7d7de0f70-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-ldwjw\" (UID: \"75cbdf07-2be3-4b03-9241-e7e7d7de0f70\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ldwjw" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.153306 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75cbdf07-2be3-4b03-9241-e7e7d7de0f70-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-ldwjw\" (UID: \"75cbdf07-2be3-4b03-9241-e7e7d7de0f70\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ldwjw" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.154052 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f76f06cb-49af-4c1f-a27f-776b2bac7b33-trusted-ca\") pod \"ingress-operator-5b745b69d9-pjjgz\" (UID: \"f76f06cb-49af-4c1f-a27f-776b2bac7b33\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pjjgz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.154071 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b491ab08-e1e7-4166-b2fc-3d265a06414f-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-vh9mz\" (UID: \"b491ab08-e1e7-4166-b2fc-3d265a06414f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vh9mz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.154098 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8f59t\" (UniqueName: \"kubernetes.io/projected/63fd244b-ef08-493d-80e6-1dbf110f5207-kube-api-access-8f59t\") pod \"openshift-controller-manager-operator-756b6f6bc6-86zb4\" (UID: \"63fd244b-ef08-493d-80e6-1dbf110f5207\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-86zb4" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.154119 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hw4d\" (UniqueName: \"kubernetes.io/projected/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-kube-api-access-7hw4d\") pod \"route-controller-manager-6576b87f9c-77f8s\" (UID: \"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.154777 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c94e27c1-1f36-4b84-9ce0-280fdb611c72-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2rtkg\" (UID: \"c94e27c1-1f36-4b84-9ce0-280fdb611c72\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rtkg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.155273 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwr88\" (UniqueName: \"kubernetes.io/projected/9d504980-0efe-4f16-b3ec-a94e4c0e0384-kube-api-access-cwr88\") pod \"marketplace-operator-79b997595-284zz\" (UID: 
\"9d504980-0efe-4f16-b3ec-a94e4c0e0384\") " pod="openshift-marketplace/marketplace-operator-79b997595-284zz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.155356 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f76f06cb-49af-4c1f-a27f-776b2bac7b33-bound-sa-token\") pod \"ingress-operator-5b745b69d9-pjjgz\" (UID: \"f76f06cb-49af-4c1f-a27f-776b2bac7b33\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pjjgz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.155427 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9d504980-0efe-4f16-b3ec-a94e4c0e0384-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-284zz\" (UID: \"9d504980-0efe-4f16-b3ec-a94e4c0e0384\") " pod="openshift-marketplace/marketplace-operator-79b997595-284zz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.155474 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6hvr\" (UniqueName: \"kubernetes.io/projected/b491ab08-e1e7-4166-b2fc-3d265a06414f-kube-api-access-x6hvr\") pod \"machine-api-operator-5694c8668f-vh9mz\" (UID: \"b491ab08-e1e7-4166-b2fc-3d265a06414f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vh9mz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.155491 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fms86\" (UniqueName: \"kubernetes.io/projected/492d31e9-e510-4e8b-9042-38ca4be5b283-kube-api-access-fms86\") pod \"machine-config-operator-74547568cd-mmmb9\" (UID: \"492d31e9-e510-4e8b-9042-38ca4be5b283\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mmmb9" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.156056 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/492d31e9-e510-4e8b-9042-38ca4be5b283-images\") pod \"machine-config-operator-74547568cd-mmmb9\" (UID: \"492d31e9-e510-4e8b-9042-38ca4be5b283\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mmmb9" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.156114 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-serving-cert\") pod \"route-controller-manager-6576b87f9c-77f8s\" (UID: \"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.157284 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/492d31e9-e510-4e8b-9042-38ca4be5b283-auth-proxy-config\") pod \"machine-config-operator-74547568cd-mmmb9\" (UID: \"492d31e9-e510-4e8b-9042-38ca4be5b283\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mmmb9" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.157447 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/5de5adaf-4595-446d-9b77-a48824db2dfa-bound-sa-token\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.157686 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5de5adaf-4595-446d-9b77-a48824db2dfa-registry-tls\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.157938 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.158070 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.158089 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eaddcad0-cef4-4919-84c0-3edbc3b2bd6c-auth-proxy-config\") pod \"machine-approver-56656f9798-wvm4n\" (UID: \"eaddcad0-cef4-4919-84c0-3edbc3b2bd6c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wvm4n" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.158132 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlwst\" (UniqueName: \"kubernetes.io/projected/eaddcad0-cef4-4919-84c0-3edbc3b2bd6c-kube-api-access-wlwst\") pod \"machine-approver-56656f9798-wvm4n\" (UID: \"eaddcad0-cef4-4919-84c0-3edbc3b2bd6c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wvm4n" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.158148 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-etcd-serving-ca\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.158170 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/37ccee71-1ac4-49a3-bef1-74f2fe4babe7-apiservice-cert\") pod \"packageserver-d55dfcdfc-689vg\" (UID: \"37ccee71-1ac4-49a3-bef1-74f2fe4babe7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.158246 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-config\") pod 
\"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.158346 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-audit\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.158425 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmtqb\" (UniqueName: \"kubernetes.io/projected/f8aec98c-b046-465a-b712-0db02e816e4f-kube-api-access-mmtqb\") pod \"machine-config-controller-84d6567774-xccrp\" (UID: \"f8aec98c-b046-465a-b712-0db02e816e4f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xccrp" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.158481 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5de5adaf-4595-446d-9b77-a48824db2dfa-ca-trust-extracted\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.158538 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-node-pullsecrets\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.158561 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f8aec98c-b046-465a-b712-0db02e816e4f-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-xccrp\" (UID: \"f8aec98c-b046-465a-b712-0db02e816e4f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xccrp" Oct 01 06:19:01 crc kubenswrapper[4747]: E1001 06:19:01.159131 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:01.659118033 +0000 UTC m=+143.068775082 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.159368 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-image-import-ca\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.159562 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjjp5\" (UniqueName: \"kubernetes.io/projected/e5175887-79c8-46f9-9708-d4d9afea026d-kube-api-access-qjjp5\") pod \"downloads-7954f5f757-bxlnn\" (UID: \"e5175887-79c8-46f9-9708-d4d9afea026d\") " pod="openshift-console/downloads-7954f5f757-bxlnn" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.159787 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-encryption-config\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.159903 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5efb7422-a464-4daf-991f-808ba693495c-secret-volume\") pod \"collect-profiles-29321655-jzmsw\" (UID: \"5efb7422-a464-4daf-991f-808ba693495c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-jzmsw" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.159921 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d504980-0efe-4f16-b3ec-a94e4c0e0384-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-284zz\" (UID: \"9d504980-0efe-4f16-b3ec-a94e4c0e0384\") " pod="openshift-marketplace/marketplace-operator-79b997595-284zz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.159981 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5efb7422-a464-4daf-991f-808ba693495c-config-volume\") pod \"collect-profiles-29321655-jzmsw\" (UID: \"5efb7422-a464-4daf-991f-808ba693495c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-jzmsw" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.159999 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-serving-cert\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.160056 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drsln\" (UniqueName: \"kubernetes.io/projected/f76f06cb-49af-4c1f-a27f-776b2bac7b33-kube-api-access-drsln\") pod \"ingress-operator-5b745b69d9-pjjgz\" (UID: \"f76f06cb-49af-4c1f-a27f-776b2bac7b33\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pjjgz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.160145 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5de5adaf-4595-446d-9b77-a48824db2dfa-ca-trust-extracted\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.160224 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/eaddcad0-cef4-4919-84c0-3edbc3b2bd6c-machine-approver-tls\") pod \"machine-approver-56656f9798-wvm4n\" (UID: \"eaddcad0-cef4-4919-84c0-3edbc3b2bd6c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wvm4n" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.160259 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-config\") pod \"route-controller-manager-6576b87f9c-77f8s\" (UID: \"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.160305 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75cbdf07-2be3-4b03-9241-e7e7d7de0f70-config\") pod \"kube-controller-manager-operator-78b949d7b-ldwjw\" (UID: \"75cbdf07-2be3-4b03-9241-e7e7d7de0f70\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ldwjw" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.160537 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w858z\" (UniqueName: \"kubernetes.io/projected/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-kube-api-access-w858z\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.160558 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c94e27c1-1f36-4b84-9ce0-280fdb611c72-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2rtkg\" (UID: \"c94e27c1-1f36-4b84-9ce0-280fdb611c72\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rtkg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.160649 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f76f06cb-49af-4c1f-a27f-776b2bac7b33-metrics-tls\") pod \"ingress-operator-5b745b69d9-pjjgz\" (UID: \"f76f06cb-49af-4c1f-a27f-776b2bac7b33\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pjjgz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.160795 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b491ab08-e1e7-4166-b2fc-3d265a06414f-images\") pod \"machine-api-operator-5694c8668f-vh9mz\" (UID: \"b491ab08-e1e7-4166-b2fc-3d265a06414f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vh9mz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.160833 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/492d31e9-e510-4e8b-9042-38ca4be5b283-proxy-tls\") pod \"machine-config-operator-74547568cd-mmmb9\" (UID: \"492d31e9-e510-4e8b-9042-38ca4be5b283\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mmmb9" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.161054 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-etcd-client\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.161206 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/63fd244b-ef08-493d-80e6-1dbf110f5207-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-86zb4\" (UID: \"63fd244b-ef08-493d-80e6-1dbf110f5207\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-86zb4" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.161225 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63fd244b-ef08-493d-80e6-1dbf110f5207-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-86zb4\" (UID: \"63fd244b-ef08-493d-80e6-1dbf110f5207\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-86zb4" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.161239 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-client-ca\") pod \"route-controller-manager-6576b87f9c-77f8s\" (UID: \"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.161258 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srdmv\" (UniqueName: \"kubernetes.io/projected/5efb7422-a464-4daf-991f-808ba693495c-kube-api-access-srdmv\") pod \"collect-profiles-29321655-jzmsw\" (UID: \"5efb7422-a464-4daf-991f-808ba693495c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-jzmsw" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.161273 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c94e27c1-1f36-4b84-9ce0-280fdb611c72-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2rtkg\" (UID: \"c94e27c1-1f36-4b84-9ce0-280fdb611c72\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rtkg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 
06:19:01.161305 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5de5adaf-4595-446d-9b77-a48824db2dfa-registry-certificates\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.161433 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f8aec98c-b046-465a-b712-0db02e816e4f-proxy-tls\") pod \"machine-config-controller-84d6567774-xccrp\" (UID: \"f8aec98c-b046-465a-b712-0db02e816e4f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xccrp" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.162465 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5de5adaf-4595-446d-9b77-a48824db2dfa-registry-certificates\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.166938 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5de5adaf-4595-446d-9b77-a48824db2dfa-registry-tls\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.234266 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z4bz4"] Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.234378 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9wgz7" Oct 01 06:19:01 crc kubenswrapper[4747]: W1001 06:19:01.241086 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode63497a4_0ba8_48a5_88af_dc20ef283130.slice/crio-c43236f5bba3eb720fed41c626864bbe266ea66e19b7d29f9842ce2667cd4ee1 WatchSource:0}: Error finding container c43236f5bba3eb720fed41c626864bbe266ea66e19b7d29f9842ce2667cd4ee1: Status 404 returned error can't find the container with id c43236f5bba3eb720fed41c626864bbe266ea66e19b7d29f9842ce2667cd4ee1 Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.262538 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.262960 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/75cbdf07-2be3-4b03-9241-e7e7d7de0f70-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-ldwjw\" (UID: \"75cbdf07-2be3-4b03-9241-e7e7d7de0f70\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ldwjw" Oct 01 06:19:01 crc kubenswrapper[4747]: E1001 06:19:01.262981 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:01.762959387 +0000 UTC m=+143.172616436 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.263014 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75cbdf07-2be3-4b03-9241-e7e7d7de0f70-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-ldwjw\" (UID: \"75cbdf07-2be3-4b03-9241-e7e7d7de0f70\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ldwjw" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.263050 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzb24\" (UniqueName: \"kubernetes.io/projected/9847d204-b75b-41bb-8f4b-03058aeab9fb-kube-api-access-vzb24\") pod \"package-server-manager-789f6589d5-q6jpz\" (UID: \"9847d204-b75b-41bb-8f4b-03058aeab9fb\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q6jpz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.263067 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/35966a08-49de-4d9a-baa2-0b1b030b2353-profile-collector-cert\") pod \"olm-operator-6b444d44fb-4zldb\" (UID: \"35966a08-49de-4d9a-baa2-0b1b030b2353\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4zldb" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.263088 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tj6nb\" (UniqueName: \"kubernetes.io/projected/35966a08-49de-4d9a-baa2-0b1b030b2353-kube-api-access-tj6nb\") pod \"olm-operator-6b444d44fb-4zldb\" (UID: \"35966a08-49de-4d9a-baa2-0b1b030b2353\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4zldb" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.263108 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ae63cbe5-e016-4101-8fe0-72aea96d7977-node-bootstrap-token\") pod \"machine-config-server-fs4jt\" (UID: \"ae63cbe5-e016-4101-8fe0-72aea96d7977\") " pod="openshift-machine-config-operator/machine-config-server-fs4jt" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.263154 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f76f06cb-49af-4c1f-a27f-776b2bac7b33-trusted-ca\") pod \"ingress-operator-5b745b69d9-pjjgz\" (UID: \"f76f06cb-49af-4c1f-a27f-776b2bac7b33\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pjjgz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.263172 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b491ab08-e1e7-4166-b2fc-3d265a06414f-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-vh9mz\" (UID: \"b491ab08-e1e7-4166-b2fc-3d265a06414f\") " 
pod="openshift-machine-api/machine-api-operator-5694c8668f-vh9mz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.263191 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/36d2fb2f-6d99-4e02-a599-55ad93259804-trusted-ca\") pod \"console-operator-58897d9998-zgvwc\" (UID: \"36d2fb2f-6d99-4e02-a599-55ad93259804\") " pod="openshift-console-operator/console-operator-58897d9998-zgvwc" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.263508 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/d8ba6078-ec36-41f2-a9c4-01f4cfbce71c-signing-cabundle\") pod \"service-ca-9c57cc56f-kxgrb\" (UID: \"d8ba6078-ec36-41f2-a9c4-01f4cfbce71c\") " pod="openshift-service-ca/service-ca-9c57cc56f-kxgrb" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.263540 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/86217928-f132-414b-a374-3e20ea531035-registration-dir\") pod \"csi-hostpathplugin-48x76\" (UID: \"86217928-f132-414b-a374-3e20ea531035\") " pod="hostpath-provisioner/csi-hostpathplugin-48x76" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.263567 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8f59t\" (UniqueName: \"kubernetes.io/projected/63fd244b-ef08-493d-80e6-1dbf110f5207-kube-api-access-8f59t\") pod \"openshift-controller-manager-operator-756b6f6bc6-86zb4\" (UID: \"63fd244b-ef08-493d-80e6-1dbf110f5207\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-86zb4" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.263616 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/86217928-f132-414b-a374-3e20ea531035-plugins-dir\") pod \"csi-hostpathplugin-48x76\" (UID: \"86217928-f132-414b-a374-3e20ea531035\") " pod="hostpath-provisioner/csi-hostpathplugin-48x76" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.263645 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hw4d\" (UniqueName: \"kubernetes.io/projected/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-kube-api-access-7hw4d\") pod \"route-controller-manager-6576b87f9c-77f8s\" (UID: \"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.263667 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9847d204-b75b-41bb-8f4b-03058aeab9fb-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-q6jpz\" (UID: \"9847d204-b75b-41bb-8f4b-03058aeab9fb\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q6jpz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.263693 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/35966a08-49de-4d9a-baa2-0b1b030b2353-srv-cert\") pod \"olm-operator-6b444d44fb-4zldb\" (UID: \"35966a08-49de-4d9a-baa2-0b1b030b2353\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4zldb" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.266570 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f76f06cb-49af-4c1f-a27f-776b2bac7b33-trusted-ca\") pod \"ingress-operator-5b745b69d9-pjjgz\" (UID: \"f76f06cb-49af-4c1f-a27f-776b2bac7b33\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pjjgz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.266627 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/dc3f0a86-ed2e-452b-b829-e2fc65bbac66-cert\") pod \"ingress-canary-xlnlq\" (UID: \"dc3f0a86-ed2e-452b-b829-e2fc65bbac66\") " pod="openshift-ingress-canary/ingress-canary-xlnlq" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.266652 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/77e07ef9-a0c4-4677-9689-cc571997ecf7-encryption-config\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.266690 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c94e27c1-1f36-4b84-9ce0-280fdb611c72-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2rtkg\" (UID: \"c94e27c1-1f36-4b84-9ce0-280fdb611c72\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rtkg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267042 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-wk2wx"] Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267077 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwr88\" (UniqueName: \"kubernetes.io/projected/9d504980-0efe-4f16-b3ec-a94e4c0e0384-kube-api-access-cwr88\") pod \"marketplace-operator-79b997595-284zz\" (UID: \"9d504980-0efe-4f16-b3ec-a94e4c0e0384\") " pod="openshift-marketplace/marketplace-operator-79b997595-284zz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267121 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/86217928-f132-414b-a374-3e20ea531035-socket-dir\") pod \"csi-hostpathplugin-48x76\" (UID: \"86217928-f132-414b-a374-3e20ea531035\") " pod="hostpath-provisioner/csi-hostpathplugin-48x76" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267145 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5sgj\" (UniqueName: \"kubernetes.io/projected/ae63cbe5-e016-4101-8fe0-72aea96d7977-kube-api-access-h5sgj\") pod \"machine-config-server-fs4jt\" (UID: \"ae63cbe5-e016-4101-8fe0-72aea96d7977\") " pod="openshift-machine-config-operator/machine-config-server-fs4jt" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267180 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f76f06cb-49af-4c1f-a27f-776b2bac7b33-bound-sa-token\") pod \"ingress-operator-5b745b69d9-pjjgz\" (UID: \"f76f06cb-49af-4c1f-a27f-776b2bac7b33\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pjjgz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267201 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/86217928-f132-414b-a374-3e20ea531035-csi-data-dir\") pod \"csi-hostpathplugin-48x76\" (UID: \"86217928-f132-414b-a374-3e20ea531035\") " pod="hostpath-provisioner/csi-hostpathplugin-48x76" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267226 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9d504980-0efe-4f16-b3ec-a94e4c0e0384-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-284zz\" (UID: \"9d504980-0efe-4f16-b3ec-a94e4c0e0384\") " pod="openshift-marketplace/marketplace-operator-79b997595-284zz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267250 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6hvr\" (UniqueName: \"kubernetes.io/projected/b491ab08-e1e7-4166-b2fc-3d265a06414f-kube-api-access-x6hvr\") pod \"machine-api-operator-5694c8668f-vh9mz\" (UID: \"b491ab08-e1e7-4166-b2fc-3d265a06414f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vh9mz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267273 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fms86\" (UniqueName: \"kubernetes.io/projected/492d31e9-e510-4e8b-9042-38ca4be5b283-kube-api-access-fms86\") pod \"machine-config-operator-74547568cd-mmmb9\" (UID: \"492d31e9-e510-4e8b-9042-38ca4be5b283\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mmmb9" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267301 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/492d31e9-e510-4e8b-9042-38ca4be5b283-images\") pod \"machine-config-operator-74547568cd-mmmb9\" (UID: \"492d31e9-e510-4e8b-9042-38ca4be5b283\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mmmb9" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267323 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/86217928-f132-414b-a374-3e20ea531035-mountpoint-dir\") pod \"csi-hostpathplugin-48x76\" (UID: \"86217928-f132-414b-a374-3e20ea531035\") " pod="hostpath-provisioner/csi-hostpathplugin-48x76" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267368 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a879ef30-1cb2-4129-8acd-6fce3c6b88e3-serving-cert\") pod \"service-ca-operator-777779d784-jrxnk\" (UID: \"a879ef30-1cb2-4129-8acd-6fce3c6b88e3\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jrxnk" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267391 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/5378035b-02a7-42d4-9c55-91de32b377c0-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-w2ctv\" (UID: \"5378035b-02a7-42d4-9c55-91de32b377c0\") " 
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w2ctv" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267414 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/77e07ef9-a0c4-4677-9689-cc571997ecf7-etcd-client\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267435 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/77e07ef9-a0c4-4677-9689-cc571997ecf7-audit-policies\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267460 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-serving-cert\") pod \"route-controller-manager-6576b87f9c-77f8s\" (UID: \"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267481 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/36d2fb2f-6d99-4e02-a599-55ad93259804-serving-cert\") pod \"console-operator-58897d9998-zgvwc\" (UID: \"36d2fb2f-6d99-4e02-a599-55ad93259804\") " pod="openshift-console-operator/console-operator-58897d9998-zgvwc" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267501 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2a26b929-108c-43a8-a196-eedee3230161-config-volume\") pod \"dns-default-92b7k\" (UID: \"2a26b929-108c-43a8-a196-eedee3230161\") " pod="openshift-dns/dns-default-92b7k" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267522 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a879ef30-1cb2-4129-8acd-6fce3c6b88e3-config\") pod \"service-ca-operator-777779d784-jrxnk\" (UID: \"a879ef30-1cb2-4129-8acd-6fce3c6b88e3\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jrxnk" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267549 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/492d31e9-e510-4e8b-9042-38ca4be5b283-auth-proxy-config\") pod \"machine-config-operator-74547568cd-mmmb9\" (UID: \"492d31e9-e510-4e8b-9042-38ca4be5b283\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mmmb9" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267573 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5de5adaf-4595-446d-9b77-a48824db2dfa-bound-sa-token\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267600 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267621 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eaddcad0-cef4-4919-84c0-3edbc3b2bd6c-auth-proxy-config\") pod \"machine-approver-56656f9798-wvm4n\" (UID: \"eaddcad0-cef4-4919-84c0-3edbc3b2bd6c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wvm4n" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267644 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4j5v4\" (UniqueName: \"kubernetes.io/projected/a879ef30-1cb2-4129-8acd-6fce3c6b88e3-kube-api-access-4j5v4\") pod \"service-ca-operator-777779d784-jrxnk\" (UID: \"a879ef30-1cb2-4129-8acd-6fce3c6b88e3\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jrxnk" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267670 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267693 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlwst\" (UniqueName: \"kubernetes.io/projected/eaddcad0-cef4-4919-84c0-3edbc3b2bd6c-kube-api-access-wlwst\") pod \"machine-approver-56656f9798-wvm4n\" (UID: \"eaddcad0-cef4-4919-84c0-3edbc3b2bd6c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wvm4n" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267715 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/37ccee71-1ac4-49a3-bef1-74f2fe4babe7-apiservice-cert\") pod \"packageserver-d55dfcdfc-689vg\" (UID: \"37ccee71-1ac4-49a3-bef1-74f2fe4babe7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267737 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9tfm\" (UniqueName: \"kubernetes.io/projected/2ddb2a27-e181-4592-b710-69693c5a3efe-kube-api-access-l9tfm\") pod \"kube-storage-version-migrator-operator-b67b599dd-6l64z\" (UID: \"2ddb2a27-e181-4592-b710-69693c5a3efe\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6l64z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267779 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-etcd-serving-ca\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267800 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-config\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267821 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxgwd\" (UniqueName: \"kubernetes.io/projected/86217928-f132-414b-a374-3e20ea531035-kube-api-access-gxgwd\") pod \"csi-hostpathplugin-48x76\" (UID: \"86217928-f132-414b-a374-3e20ea531035\") " pod="hostpath-provisioner/csi-hostpathplugin-48x76" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267845 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-audit\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267869 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmtqb\" (UniqueName: \"kubernetes.io/projected/f8aec98c-b046-465a-b712-0db02e816e4f-kube-api-access-mmtqb\") pod \"machine-config-controller-84d6567774-xccrp\" (UID: \"f8aec98c-b046-465a-b712-0db02e816e4f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xccrp" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267895 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-node-pullsecrets\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267916 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f8aec98c-b046-465a-b712-0db02e816e4f-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-xccrp\" (UID: \"f8aec98c-b046-465a-b712-0db02e816e4f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xccrp" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267939 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/77e07ef9-a0c4-4677-9689-cc571997ecf7-audit-dir\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.267992 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-image-import-ca\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.268021 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjjp5\" (UniqueName: \"kubernetes.io/projected/e5175887-79c8-46f9-9708-d4d9afea026d-kube-api-access-qjjp5\") pod \"downloads-7954f5f757-bxlnn\" (UID: \"e5175887-79c8-46f9-9708-d4d9afea026d\") " 
pod="openshift-console/downloads-7954f5f757-bxlnn" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.268044 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-encryption-config\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.268071 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5efb7422-a464-4daf-991f-808ba693495c-secret-volume\") pod \"collect-profiles-29321655-jzmsw\" (UID: \"5efb7422-a464-4daf-991f-808ba693495c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-jzmsw" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.268093 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d504980-0efe-4f16-b3ec-a94e4c0e0384-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-284zz\" (UID: \"9d504980-0efe-4f16-b3ec-a94e4c0e0384\") " pod="openshift-marketplace/marketplace-operator-79b997595-284zz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.268118 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtbtm\" (UniqueName: \"kubernetes.io/projected/9648d961-f516-4332-9ff8-225f40f0af8f-kube-api-access-jtbtm\") pod \"multus-admission-controller-857f4d67dd-9tnsf\" (UID: \"9648d961-f516-4332-9ff8-225f40f0af8f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-9tnsf" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.271555 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75cbdf07-2be3-4b03-9241-e7e7d7de0f70-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-ldwjw\" (UID: \"75cbdf07-2be3-4b03-9241-e7e7d7de0f70\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ldwjw" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.271807 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/492d31e9-e510-4e8b-9042-38ca4be5b283-images\") pod \"machine-config-operator-74547568cd-mmmb9\" (UID: \"492d31e9-e510-4e8b-9042-38ca4be5b283\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mmmb9" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.271831 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/492d31e9-e510-4e8b-9042-38ca4be5b283-auth-proxy-config\") pod \"machine-config-operator-74547568cd-mmmb9\" (UID: \"492d31e9-e510-4e8b-9042-38ca4be5b283\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mmmb9" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.271931 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-node-pullsecrets\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.272716 4747 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-etcd-serving-ca\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.272815 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-audit\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.273053 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/f8aec98c-b046-465a-b712-0db02e816e4f-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-xccrp\" (UID: \"f8aec98c-b046-465a-b712-0db02e816e4f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xccrp" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.273178 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-image-import-ca\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.273673 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-config\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.273705 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.273851 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/b491ab08-e1e7-4166-b2fc-3d265a06414f-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-vh9mz\" (UID: \"b491ab08-e1e7-4166-b2fc-3d265a06414f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vh9mz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.273907 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5efb7422-a464-4daf-991f-808ba693495c-config-volume\") pod \"collect-profiles-29321655-jzmsw\" (UID: \"5efb7422-a464-4daf-991f-808ba693495c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-jzmsw" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.273930 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-serving-cert\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 
06:19:01.273955 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drsln\" (UniqueName: \"kubernetes.io/projected/f76f06cb-49af-4c1f-a27f-776b2bac7b33-kube-api-access-drsln\") pod \"ingress-operator-5b745b69d9-pjjgz\" (UID: \"f76f06cb-49af-4c1f-a27f-776b2bac7b33\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pjjgz" Oct 01 06:19:01 crc kubenswrapper[4747]: E1001 06:19:01.273993 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:01.773979743 +0000 UTC m=+143.183636792 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.274592 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5efb7422-a464-4daf-991f-808ba693495c-config-volume\") pod \"collect-profiles-29321655-jzmsw\" (UID: \"5efb7422-a464-4daf-991f-808ba693495c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-jzmsw" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.274815 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/eaddcad0-cef4-4919-84c0-3edbc3b2bd6c-machine-approver-tls\") pod \"machine-approver-56656f9798-wvm4n\" (UID: \"eaddcad0-cef4-4919-84c0-3edbc3b2bd6c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wvm4n" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.274842 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ae63cbe5-e016-4101-8fe0-72aea96d7977-certs\") pod \"machine-config-server-fs4jt\" (UID: \"ae63cbe5-e016-4101-8fe0-72aea96d7977\") " pod="openshift-machine-config-operator/machine-config-server-fs4jt" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.274865 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-config\") pod \"route-controller-manager-6576b87f9c-77f8s\" (UID: \"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.274899 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpf42\" (UniqueName: \"kubernetes.io/projected/5378035b-02a7-42d4-9c55-91de32b377c0-kube-api-access-cpf42\") pod \"control-plane-machine-set-operator-78cbb6b69f-w2ctv\" (UID: \"5378035b-02a7-42d4-9c55-91de32b377c0\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w2ctv" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.274917 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/75cbdf07-2be3-4b03-9241-e7e7d7de0f70-config\") pod \"kube-controller-manager-operator-78b949d7b-ldwjw\" (UID: \"75cbdf07-2be3-4b03-9241-e7e7d7de0f70\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ldwjw" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.274933 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36d2fb2f-6d99-4e02-a599-55ad93259804-config\") pod \"console-operator-58897d9998-zgvwc\" (UID: \"36d2fb2f-6d99-4e02-a599-55ad93259804\") " pod="openshift-console-operator/console-operator-58897d9998-zgvwc" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.274948 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77e07ef9-a0c4-4677-9689-cc571997ecf7-serving-cert\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.274966 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w858z\" (UniqueName: \"kubernetes.io/projected/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-kube-api-access-w858z\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.274985 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c94e27c1-1f36-4b84-9ce0-280fdb611c72-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2rtkg\" (UID: \"c94e27c1-1f36-4b84-9ce0-280fdb611c72\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rtkg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.275003 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bdc2\" (UniqueName: \"kubernetes.io/projected/2a26b929-108c-43a8-a196-eedee3230161-kube-api-access-6bdc2\") pod \"dns-default-92b7k\" (UID: \"2a26b929-108c-43a8-a196-eedee3230161\") " pod="openshift-dns/dns-default-92b7k" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.275024 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b491ab08-e1e7-4166-b2fc-3d265a06414f-images\") pod \"machine-api-operator-5694c8668f-vh9mz\" (UID: \"b491ab08-e1e7-4166-b2fc-3d265a06414f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vh9mz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.275038 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/492d31e9-e510-4e8b-9042-38ca4be5b283-proxy-tls\") pod \"machine-config-operator-74547568cd-mmmb9\" (UID: \"492d31e9-e510-4e8b-9042-38ca4be5b283\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mmmb9" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.275064 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f76f06cb-49af-4c1f-a27f-776b2bac7b33-metrics-tls\") pod \"ingress-operator-5b745b69d9-pjjgz\" (UID: \"f76f06cb-49af-4c1f-a27f-776b2bac7b33\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pjjgz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.275081 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-etcd-client\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.275108 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk959\" (UniqueName: \"kubernetes.io/projected/d8ba6078-ec36-41f2-a9c4-01f4cfbce71c-kube-api-access-lk959\") pod \"service-ca-9c57cc56f-kxgrb\" (UID: \"d8ba6078-ec36-41f2-a9c4-01f4cfbce71c\") " pod="openshift-service-ca/service-ca-9c57cc56f-kxgrb" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.275124 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-client-ca\") pod \"route-controller-manager-6576b87f9c-77f8s\" (UID: \"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.275139 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2a26b929-108c-43a8-a196-eedee3230161-metrics-tls\") pod \"dns-default-92b7k\" (UID: \"2a26b929-108c-43a8-a196-eedee3230161\") " pod="openshift-dns/dns-default-92b7k" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.275151 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/37ccee71-1ac4-49a3-bef1-74f2fe4babe7-apiservice-cert\") pod \"packageserver-d55dfcdfc-689vg\" (UID: \"37ccee71-1ac4-49a3-bef1-74f2fe4babe7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.275155 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/63fd244b-ef08-493d-80e6-1dbf110f5207-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-86zb4\" (UID: \"63fd244b-ef08-493d-80e6-1dbf110f5207\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-86zb4" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.275796 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75cbdf07-2be3-4b03-9241-e7e7d7de0f70-config\") pod \"kube-controller-manager-operator-78b949d7b-ldwjw\" (UID: \"75cbdf07-2be3-4b03-9241-e7e7d7de0f70\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ldwjw" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.276674 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5efb7422-a464-4daf-991f-808ba693495c-secret-volume\") pod \"collect-profiles-29321655-jzmsw\" (UID: \"5efb7422-a464-4daf-991f-808ba693495c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-jzmsw" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.277027 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eaddcad0-cef4-4919-84c0-3edbc3b2bd6c-auth-proxy-config\") pod \"machine-approver-56656f9798-wvm4n\" (UID: \"eaddcad0-cef4-4919-84c0-3edbc3b2bd6c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wvm4n" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.293169 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-config\") pod \"route-controller-manager-6576b87f9c-77f8s\" (UID: \"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.293597 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-serving-cert\") pod \"route-controller-manager-6576b87f9c-77f8s\" (UID: \"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.293669 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9d504980-0efe-4f16-b3ec-a94e4c0e0384-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-284zz\" (UID: \"9d504980-0efe-4f16-b3ec-a94e4c0e0384\") " pod="openshift-marketplace/marketplace-operator-79b997595-284zz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.294026 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c94e27c1-1f36-4b84-9ce0-280fdb611c72-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2rtkg\" (UID: \"c94e27c1-1f36-4b84-9ce0-280fdb611c72\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rtkg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.294245 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-client-ca\") pod \"route-controller-manager-6576b87f9c-77f8s\" (UID: \"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.294380 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63fd244b-ef08-493d-80e6-1dbf110f5207-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-86zb4\" (UID: \"63fd244b-ef08-493d-80e6-1dbf110f5207\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-86zb4" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.294387 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-encryption-config\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.294590 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bp7w9" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.294661 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/eaddcad0-cef4-4919-84c0-3edbc3b2bd6c-machine-approver-tls\") pod \"machine-approver-56656f9798-wvm4n\" (UID: \"eaddcad0-cef4-4919-84c0-3edbc3b2bd6c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wvm4n" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.294874 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c94e27c1-1f36-4b84-9ce0-280fdb611c72-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2rtkg\" (UID: \"c94e27c1-1f36-4b84-9ce0-280fdb611c72\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rtkg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.294931 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7lvm\" (UniqueName: \"kubernetes.io/projected/dc3f0a86-ed2e-452b-b829-e2fc65bbac66-kube-api-access-s7lvm\") pod \"ingress-canary-xlnlq\" (UID: \"dc3f0a86-ed2e-452b-b829-e2fc65bbac66\") " pod="openshift-ingress-canary/ingress-canary-xlnlq" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.295170 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srdmv\" (UniqueName: \"kubernetes.io/projected/5efb7422-a464-4daf-991f-808ba693495c-kube-api-access-srdmv\") pod \"collect-profiles-29321655-jzmsw\" (UID: \"5efb7422-a464-4daf-991f-808ba693495c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-jzmsw" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.295267 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/77e07ef9-a0c4-4677-9689-cc571997ecf7-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.295673 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/63fd244b-ef08-493d-80e6-1dbf110f5207-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-86zb4\" (UID: \"63fd244b-ef08-493d-80e6-1dbf110f5207\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-86zb4" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.296524 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c94e27c1-1f36-4b84-9ce0-280fdb611c72-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2rtkg\" (UID: \"c94e27c1-1f36-4b84-9ce0-280fdb611c72\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rtkg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.297469 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-serving-cert\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc 
kubenswrapper[4747]: I1001 06:19:01.297682 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f8aec98c-b046-465a-b712-0db02e816e4f-proxy-tls\") pod \"machine-config-controller-84d6567774-xccrp\" (UID: \"f8aec98c-b046-465a-b712-0db02e816e4f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xccrp" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.297774 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9648d961-f516-4332-9ff8-225f40f0af8f-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-9tnsf\" (UID: \"9648d961-f516-4332-9ff8-225f40f0af8f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-9tnsf" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.297815 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/77e07ef9-a0c4-4677-9689-cc571997ecf7-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.297870 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63fd244b-ef08-493d-80e6-1dbf110f5207-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-86zb4\" (UID: \"63fd244b-ef08-493d-80e6-1dbf110f5207\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-86zb4" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.297993 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2ddb2a27-e181-4592-b710-69693c5a3efe-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-6l64z\" (UID: \"2ddb2a27-e181-4592-b710-69693c5a3efe\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6l64z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.298036 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/d8ba6078-ec36-41f2-a9c4-01f4cfbce71c-signing-key\") pod \"service-ca-9c57cc56f-kxgrb\" (UID: \"d8ba6078-ec36-41f2-a9c4-01f4cfbce71c\") " pod="openshift-service-ca/service-ca-9c57cc56f-kxgrb" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.298065 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ddb2a27-e181-4592-b710-69693c5a3efe-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-6l64z\" (UID: \"2ddb2a27-e181-4592-b710-69693c5a3efe\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6l64z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.298124 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxb4k\" (UniqueName: \"kubernetes.io/projected/77e07ef9-a0c4-4677-9689-cc571997ecf7-kube-api-access-wxb4k\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc 
kubenswrapper[4747]: I1001 06:19:01.298163 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jg6f\" (UniqueName: \"kubernetes.io/projected/5de5adaf-4595-446d-9b77-a48824db2dfa-kube-api-access-7jg6f\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.298219 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/37ccee71-1ac4-49a3-bef1-74f2fe4babe7-webhook-cert\") pod \"packageserver-d55dfcdfc-689vg\" (UID: \"37ccee71-1ac4-49a3-bef1-74f2fe4babe7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.298248 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnxgz\" (UniqueName: \"kubernetes.io/projected/37ccee71-1ac4-49a3-bef1-74f2fe4babe7-kube-api-access-xnxgz\") pod \"packageserver-d55dfcdfc-689vg\" (UID: \"37ccee71-1ac4-49a3-bef1-74f2fe4babe7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.298606 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b491ab08-e1e7-4166-b2fc-3d265a06414f-images\") pod \"machine-api-operator-5694c8668f-vh9mz\" (UID: \"b491ab08-e1e7-4166-b2fc-3d265a06414f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vh9mz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.298892 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d504980-0efe-4f16-b3ec-a94e4c0e0384-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-284zz\" (UID: \"9d504980-0efe-4f16-b3ec-a94e4c0e0384\") " pod="openshift-marketplace/marketplace-operator-79b997595-284zz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.299179 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5de5adaf-4595-446d-9b77-a48824db2dfa-trusted-ca\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.299430 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b491ab08-e1e7-4166-b2fc-3d265a06414f-config\") pod \"machine-api-operator-5694c8668f-vh9mz\" (UID: \"b491ab08-e1e7-4166-b2fc-3d265a06414f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vh9mz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.299509 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/37ccee71-1ac4-49a3-bef1-74f2fe4babe7-tmpfs\") pod \"packageserver-d55dfcdfc-689vg\" (UID: \"37ccee71-1ac4-49a3-bef1-74f2fe4babe7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.299902 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-etcd-client\") pod \"apiserver-76f77b778f-qnv2z\" (UID: 
\"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.300070 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b491ab08-e1e7-4166-b2fc-3d265a06414f-config\") pod \"machine-api-operator-5694c8668f-vh9mz\" (UID: \"b491ab08-e1e7-4166-b2fc-3d265a06414f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vh9mz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.300320 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-audit-dir\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.300339 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/37ccee71-1ac4-49a3-bef1-74f2fe4babe7-tmpfs\") pod \"packageserver-d55dfcdfc-689vg\" (UID: \"37ccee71-1ac4-49a3-bef1-74f2fe4babe7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.300374 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-audit-dir\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.300376 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67mqq\" (UniqueName: \"kubernetes.io/projected/07aacdeb-d996-4747-a1d8-4803d5f7f4a7-kube-api-access-67mqq\") pod \"migrator-59844c95c7-4kxcx\" (UID: \"07aacdeb-d996-4747-a1d8-4803d5f7f4a7\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-4kxcx" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.300531 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f76f06cb-49af-4c1f-a27f-776b2bac7b33-metrics-tls\") pod \"ingress-operator-5b745b69d9-pjjgz\" (UID: \"f76f06cb-49af-4c1f-a27f-776b2bac7b33\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pjjgz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.300542 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r87sf\" (UniqueName: \"kubernetes.io/projected/36d2fb2f-6d99-4e02-a599-55ad93259804-kube-api-access-r87sf\") pod \"console-operator-58897d9998-zgvwc\" (UID: \"36d2fb2f-6d99-4e02-a599-55ad93259804\") " pod="openshift-console-operator/console-operator-58897d9998-zgvwc" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.300582 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5de5adaf-4595-446d-9b77-a48824db2dfa-installation-pull-secrets\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.300633 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/eaddcad0-cef4-4919-84c0-3edbc3b2bd6c-config\") pod \"machine-approver-56656f9798-wvm4n\" (UID: \"eaddcad0-cef4-4919-84c0-3edbc3b2bd6c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wvm4n" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.302612 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5de5adaf-4595-446d-9b77-a48824db2dfa-trusted-ca\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.303005 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5de5adaf-4595-446d-9b77-a48824db2dfa-installation-pull-secrets\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.303264 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eaddcad0-cef4-4919-84c0-3edbc3b2bd6c-config\") pod \"machine-approver-56656f9798-wvm4n\" (UID: \"eaddcad0-cef4-4919-84c0-3edbc3b2bd6c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wvm4n" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.304425 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/f8aec98c-b046-465a-b712-0db02e816e4f-proxy-tls\") pod \"machine-config-controller-84d6567774-xccrp\" (UID: \"f8aec98c-b046-465a-b712-0db02e816e4f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xccrp" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.309043 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-kcszh"] Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.309192 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/492d31e9-e510-4e8b-9042-38ca4be5b283-proxy-tls\") pod \"machine-config-operator-74547568cd-mmmb9\" (UID: \"492d31e9-e510-4e8b-9042-38ca4be5b283\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mmmb9" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.309772 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/37ccee71-1ac4-49a3-bef1-74f2fe4babe7-webhook-cert\") pod \"packageserver-d55dfcdfc-689vg\" (UID: \"37ccee71-1ac4-49a3-bef1-74f2fe4babe7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.312140 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/75cbdf07-2be3-4b03-9241-e7e7d7de0f70-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-ldwjw\" (UID: \"75cbdf07-2be3-4b03-9241-e7e7d7de0f70\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ldwjw" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.326351 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8f59t\" (UniqueName: 
\"kubernetes.io/projected/63fd244b-ef08-493d-80e6-1dbf110f5207-kube-api-access-8f59t\") pod \"openshift-controller-manager-operator-756b6f6bc6-86zb4\" (UID: \"63fd244b-ef08-493d-80e6-1dbf110f5207\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-86zb4" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.345648 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2ct5k"] Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.359654 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-nsd9h"] Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.359696 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8jxsb"] Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.362513 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-csw7c"] Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.370648 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pghqr"] Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.371908 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ldwjw" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.377275 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c94e27c1-1f36-4b84-9ce0-280fdb611c72-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2rtkg\" (UID: \"c94e27c1-1f36-4b84-9ce0-280fdb611c72\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rtkg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.378886 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hw4d\" (UniqueName: \"kubernetes.io/projected/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-kube-api-access-7hw4d\") pod \"route-controller-manager-6576b87f9c-77f8s\" (UID: \"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.394266 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rtkg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403206 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403325 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36d2fb2f-6d99-4e02-a599-55ad93259804-config\") pod \"console-operator-58897d9998-zgvwc\" (UID: \"36d2fb2f-6d99-4e02-a599-55ad93259804\") " pod="openshift-console-operator/console-operator-58897d9998-zgvwc" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403349 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77e07ef9-a0c4-4677-9689-cc571997ecf7-serving-cert\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403375 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bdc2\" (UniqueName: \"kubernetes.io/projected/2a26b929-108c-43a8-a196-eedee3230161-kube-api-access-6bdc2\") pod \"dns-default-92b7k\" (UID: \"2a26b929-108c-43a8-a196-eedee3230161\") " pod="openshift-dns/dns-default-92b7k" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403396 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk959\" (UniqueName: \"kubernetes.io/projected/d8ba6078-ec36-41f2-a9c4-01f4cfbce71c-kube-api-access-lk959\") pod \"service-ca-9c57cc56f-kxgrb\" (UID: \"d8ba6078-ec36-41f2-a9c4-01f4cfbce71c\") " pod="openshift-service-ca/service-ca-9c57cc56f-kxgrb" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403415 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2a26b929-108c-43a8-a196-eedee3230161-metrics-tls\") pod \"dns-default-92b7k\" (UID: \"2a26b929-108c-43a8-a196-eedee3230161\") " pod="openshift-dns/dns-default-92b7k" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403446 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7lvm\" (UniqueName: \"kubernetes.io/projected/dc3f0a86-ed2e-452b-b829-e2fc65bbac66-kube-api-access-s7lvm\") pod \"ingress-canary-xlnlq\" (UID: \"dc3f0a86-ed2e-452b-b829-e2fc65bbac66\") " pod="openshift-ingress-canary/ingress-canary-xlnlq" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403466 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/77e07ef9-a0c4-4677-9689-cc571997ecf7-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403488 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/77e07ef9-a0c4-4677-9689-cc571997ecf7-etcd-serving-ca\") pod 
\"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403504 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9648d961-f516-4332-9ff8-225f40f0af8f-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-9tnsf\" (UID: \"9648d961-f516-4332-9ff8-225f40f0af8f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-9tnsf" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403519 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/d8ba6078-ec36-41f2-a9c4-01f4cfbce71c-signing-key\") pod \"service-ca-9c57cc56f-kxgrb\" (UID: \"d8ba6078-ec36-41f2-a9c4-01f4cfbce71c\") " pod="openshift-service-ca/service-ca-9c57cc56f-kxgrb" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403534 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2ddb2a27-e181-4592-b710-69693c5a3efe-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-6l64z\" (UID: \"2ddb2a27-e181-4592-b710-69693c5a3efe\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6l64z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403548 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ddb2a27-e181-4592-b710-69693c5a3efe-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-6l64z\" (UID: \"2ddb2a27-e181-4592-b710-69693c5a3efe\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6l64z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403575 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxb4k\" (UniqueName: \"kubernetes.io/projected/77e07ef9-a0c4-4677-9689-cc571997ecf7-kube-api-access-wxb4k\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403615 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r87sf\" (UniqueName: \"kubernetes.io/projected/36d2fb2f-6d99-4e02-a599-55ad93259804-kube-api-access-r87sf\") pod \"console-operator-58897d9998-zgvwc\" (UID: \"36d2fb2f-6d99-4e02-a599-55ad93259804\") " pod="openshift-console-operator/console-operator-58897d9998-zgvwc" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403635 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/35966a08-49de-4d9a-baa2-0b1b030b2353-profile-collector-cert\") pod \"olm-operator-6b444d44fb-4zldb\" (UID: \"35966a08-49de-4d9a-baa2-0b1b030b2353\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4zldb" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403650 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzb24\" (UniqueName: \"kubernetes.io/projected/9847d204-b75b-41bb-8f4b-03058aeab9fb-kube-api-access-vzb24\") pod \"package-server-manager-789f6589d5-q6jpz\" (UID: \"9847d204-b75b-41bb-8f4b-03058aeab9fb\") " 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q6jpz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403665 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ae63cbe5-e016-4101-8fe0-72aea96d7977-node-bootstrap-token\") pod \"machine-config-server-fs4jt\" (UID: \"ae63cbe5-e016-4101-8fe0-72aea96d7977\") " pod="openshift-machine-config-operator/machine-config-server-fs4jt" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403679 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tj6nb\" (UniqueName: \"kubernetes.io/projected/35966a08-49de-4d9a-baa2-0b1b030b2353-kube-api-access-tj6nb\") pod \"olm-operator-6b444d44fb-4zldb\" (UID: \"35966a08-49de-4d9a-baa2-0b1b030b2353\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4zldb" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403693 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/36d2fb2f-6d99-4e02-a599-55ad93259804-trusted-ca\") pod \"console-operator-58897d9998-zgvwc\" (UID: \"36d2fb2f-6d99-4e02-a599-55ad93259804\") " pod="openshift-console-operator/console-operator-58897d9998-zgvwc" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403708 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/d8ba6078-ec36-41f2-a9c4-01f4cfbce71c-signing-cabundle\") pod \"service-ca-9c57cc56f-kxgrb\" (UID: \"d8ba6078-ec36-41f2-a9c4-01f4cfbce71c\") " pod="openshift-service-ca/service-ca-9c57cc56f-kxgrb" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403725 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/86217928-f132-414b-a374-3e20ea531035-registration-dir\") pod \"csi-hostpathplugin-48x76\" (UID: \"86217928-f132-414b-a374-3e20ea531035\") " pod="hostpath-provisioner/csi-hostpathplugin-48x76" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403739 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/86217928-f132-414b-a374-3e20ea531035-plugins-dir\") pod \"csi-hostpathplugin-48x76\" (UID: \"86217928-f132-414b-a374-3e20ea531035\") " pod="hostpath-provisioner/csi-hostpathplugin-48x76" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403845 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9847d204-b75b-41bb-8f4b-03058aeab9fb-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-q6jpz\" (UID: \"9847d204-b75b-41bb-8f4b-03058aeab9fb\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q6jpz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403864 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/35966a08-49de-4d9a-baa2-0b1b030b2353-srv-cert\") pod \"olm-operator-6b444d44fb-4zldb\" (UID: \"35966a08-49de-4d9a-baa2-0b1b030b2353\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4zldb" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403878 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"encryption-config\" (UniqueName: \"kubernetes.io/secret/77e07ef9-a0c4-4677-9689-cc571997ecf7-encryption-config\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403892 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/dc3f0a86-ed2e-452b-b829-e2fc65bbac66-cert\") pod \"ingress-canary-xlnlq\" (UID: \"dc3f0a86-ed2e-452b-b829-e2fc65bbac66\") " pod="openshift-ingress-canary/ingress-canary-xlnlq" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403937 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/86217928-f132-414b-a374-3e20ea531035-socket-dir\") pod \"csi-hostpathplugin-48x76\" (UID: \"86217928-f132-414b-a374-3e20ea531035\") " pod="hostpath-provisioner/csi-hostpathplugin-48x76" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403951 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5sgj\" (UniqueName: \"kubernetes.io/projected/ae63cbe5-e016-4101-8fe0-72aea96d7977-kube-api-access-h5sgj\") pod \"machine-config-server-fs4jt\" (UID: \"ae63cbe5-e016-4101-8fe0-72aea96d7977\") " pod="openshift-machine-config-operator/machine-config-server-fs4jt" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.403975 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/86217928-f132-414b-a374-3e20ea531035-csi-data-dir\") pod \"csi-hostpathplugin-48x76\" (UID: \"86217928-f132-414b-a374-3e20ea531035\") " pod="hostpath-provisioner/csi-hostpathplugin-48x76" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.404006 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/86217928-f132-414b-a374-3e20ea531035-mountpoint-dir\") pod \"csi-hostpathplugin-48x76\" (UID: \"86217928-f132-414b-a374-3e20ea531035\") " pod="hostpath-provisioner/csi-hostpathplugin-48x76" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.404023 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a879ef30-1cb2-4129-8acd-6fce3c6b88e3-serving-cert\") pod \"service-ca-operator-777779d784-jrxnk\" (UID: \"a879ef30-1cb2-4129-8acd-6fce3c6b88e3\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jrxnk" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.404039 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/5378035b-02a7-42d4-9c55-91de32b377c0-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-w2ctv\" (UID: \"5378035b-02a7-42d4-9c55-91de32b377c0\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w2ctv" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.404059 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/77e07ef9-a0c4-4677-9689-cc571997ecf7-audit-policies\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc 
kubenswrapper[4747]: I1001 06:19:01.404073 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/77e07ef9-a0c4-4677-9689-cc571997ecf7-etcd-client\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.404087 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/36d2fb2f-6d99-4e02-a599-55ad93259804-serving-cert\") pod \"console-operator-58897d9998-zgvwc\" (UID: \"36d2fb2f-6d99-4e02-a599-55ad93259804\") " pod="openshift-console-operator/console-operator-58897d9998-zgvwc" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.404101 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2a26b929-108c-43a8-a196-eedee3230161-config-volume\") pod \"dns-default-92b7k\" (UID: \"2a26b929-108c-43a8-a196-eedee3230161\") " pod="openshift-dns/dns-default-92b7k" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.404116 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a879ef30-1cb2-4129-8acd-6fce3c6b88e3-config\") pod \"service-ca-operator-777779d784-jrxnk\" (UID: \"a879ef30-1cb2-4129-8acd-6fce3c6b88e3\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jrxnk" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.404140 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4j5v4\" (UniqueName: \"kubernetes.io/projected/a879ef30-1cb2-4129-8acd-6fce3c6b88e3-kube-api-access-4j5v4\") pod \"service-ca-operator-777779d784-jrxnk\" (UID: \"a879ef30-1cb2-4129-8acd-6fce3c6b88e3\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jrxnk" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.404171 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9tfm\" (UniqueName: \"kubernetes.io/projected/2ddb2a27-e181-4592-b710-69693c5a3efe-kube-api-access-l9tfm\") pod \"kube-storage-version-migrator-operator-b67b599dd-6l64z\" (UID: \"2ddb2a27-e181-4592-b710-69693c5a3efe\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6l64z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.404188 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxgwd\" (UniqueName: \"kubernetes.io/projected/86217928-f132-414b-a374-3e20ea531035-kube-api-access-gxgwd\") pod \"csi-hostpathplugin-48x76\" (UID: \"86217928-f132-414b-a374-3e20ea531035\") " pod="hostpath-provisioner/csi-hostpathplugin-48x76" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.404207 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/77e07ef9-a0c4-4677-9689-cc571997ecf7-audit-dir\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.404238 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtbtm\" (UniqueName: 
\"kubernetes.io/projected/9648d961-f516-4332-9ff8-225f40f0af8f-kube-api-access-jtbtm\") pod \"multus-admission-controller-857f4d67dd-9tnsf\" (UID: \"9648d961-f516-4332-9ff8-225f40f0af8f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-9tnsf" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.404259 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ae63cbe5-e016-4101-8fe0-72aea96d7977-certs\") pod \"machine-config-server-fs4jt\" (UID: \"ae63cbe5-e016-4101-8fe0-72aea96d7977\") " pod="openshift-machine-config-operator/machine-config-server-fs4jt" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.404289 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpf42\" (UniqueName: \"kubernetes.io/projected/5378035b-02a7-42d4-9c55-91de32b377c0-kube-api-access-cpf42\") pod \"control-plane-machine-set-operator-78cbb6b69f-w2ctv\" (UID: \"5378035b-02a7-42d4-9c55-91de32b377c0\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w2ctv" Oct 01 06:19:01 crc kubenswrapper[4747]: E1001 06:19:01.404521 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:01.904507362 +0000 UTC m=+143.314164401 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.404838 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/86217928-f132-414b-a374-3e20ea531035-plugins-dir\") pod \"csi-hostpathplugin-48x76\" (UID: \"86217928-f132-414b-a374-3e20ea531035\") " pod="hostpath-provisioner/csi-hostpathplugin-48x76" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.407309 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36d2fb2f-6d99-4e02-a599-55ad93259804-config\") pod \"console-operator-58897d9998-zgvwc\" (UID: \"36d2fb2f-6d99-4e02-a599-55ad93259804\") " pod="openshift-console-operator/console-operator-58897d9998-zgvwc" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.408246 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/77e07ef9-a0c4-4677-9689-cc571997ecf7-audit-policies\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.408347 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/86217928-f132-414b-a374-3e20ea531035-mountpoint-dir\") pod \"csi-hostpathplugin-48x76\" (UID: \"86217928-f132-414b-a374-3e20ea531035\") " pod="hostpath-provisioner/csi-hostpathplugin-48x76" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 
06:19:01.408352 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/77e07ef9-a0c4-4677-9689-cc571997ecf7-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.408374 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-q2zvk"] Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.408436 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/86217928-f132-414b-a374-3e20ea531035-csi-data-dir\") pod \"csi-hostpathplugin-48x76\" (UID: \"86217928-f132-414b-a374-3e20ea531035\") " pod="hostpath-provisioner/csi-hostpathplugin-48x76" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.408717 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/86217928-f132-414b-a374-3e20ea531035-socket-dir\") pod \"csi-hostpathplugin-48x76\" (UID: \"86217928-f132-414b-a374-3e20ea531035\") " pod="hostpath-provisioner/csi-hostpathplugin-48x76" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.409464 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/77e07ef9-a0c4-4677-9689-cc571997ecf7-audit-dir\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.410362 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/77e07ef9-a0c4-4677-9689-cc571997ecf7-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.410489 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwr88\" (UniqueName: \"kubernetes.io/projected/9d504980-0efe-4f16-b3ec-a94e4c0e0384-kube-api-access-cwr88\") pod \"marketplace-operator-79b997595-284zz\" (UID: \"9d504980-0efe-4f16-b3ec-a94e4c0e0384\") " pod="openshift-marketplace/marketplace-operator-79b997595-284zz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.411055 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ddb2a27-e181-4592-b710-69693c5a3efe-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-6l64z\" (UID: \"2ddb2a27-e181-4592-b710-69693c5a3efe\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6l64z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.411242 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/d8ba6078-ec36-41f2-a9c4-01f4cfbce71c-signing-cabundle\") pod \"service-ca-9c57cc56f-kxgrb\" (UID: \"d8ba6078-ec36-41f2-a9c4-01f4cfbce71c\") " pod="openshift-service-ca/service-ca-9c57cc56f-kxgrb" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.411825 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: 
\"kubernetes.io/host-path/86217928-f132-414b-a374-3e20ea531035-registration-dir\") pod \"csi-hostpathplugin-48x76\" (UID: \"86217928-f132-414b-a374-3e20ea531035\") " pod="hostpath-provisioner/csi-hostpathplugin-48x76" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.412632 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2a26b929-108c-43a8-a196-eedee3230161-config-volume\") pod \"dns-default-92b7k\" (UID: \"2a26b929-108c-43a8-a196-eedee3230161\") " pod="openshift-dns/dns-default-92b7k" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.412949 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/36d2fb2f-6d99-4e02-a599-55ad93259804-trusted-ca\") pod \"console-operator-58897d9998-zgvwc\" (UID: \"36d2fb2f-6d99-4e02-a599-55ad93259804\") " pod="openshift-console-operator/console-operator-58897d9998-zgvwc" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.413455 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a879ef30-1cb2-4129-8acd-6fce3c6b88e3-config\") pod \"service-ca-operator-777779d784-jrxnk\" (UID: \"a879ef30-1cb2-4129-8acd-6fce3c6b88e3\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jrxnk" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.416488 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/35966a08-49de-4d9a-baa2-0b1b030b2353-srv-cert\") pod \"olm-operator-6b444d44fb-4zldb\" (UID: \"35966a08-49de-4d9a-baa2-0b1b030b2353\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4zldb" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.417175 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/77e07ef9-a0c4-4677-9689-cc571997ecf7-etcd-client\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.418253 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f76f06cb-49af-4c1f-a27f-776b2bac7b33-bound-sa-token\") pod \"ingress-operator-5b745b69d9-pjjgz\" (UID: \"f76f06cb-49af-4c1f-a27f-776b2bac7b33\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pjjgz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.418259 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ae63cbe5-e016-4101-8fe0-72aea96d7977-node-bootstrap-token\") pod \"machine-config-server-fs4jt\" (UID: \"ae63cbe5-e016-4101-8fe0-72aea96d7977\") " pod="openshift-machine-config-operator/machine-config-server-fs4jt" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.418590 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/9648d961-f516-4332-9ff8-225f40f0af8f-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-9tnsf\" (UID: \"9648d961-f516-4332-9ff8-225f40f0af8f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-9tnsf" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.418661 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" 
(UniqueName: \"kubernetes.io/secret/d8ba6078-ec36-41f2-a9c4-01f4cfbce71c-signing-key\") pod \"service-ca-9c57cc56f-kxgrb\" (UID: \"d8ba6078-ec36-41f2-a9c4-01f4cfbce71c\") " pod="openshift-service-ca/service-ca-9c57cc56f-kxgrb" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.418995 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/36d2fb2f-6d99-4e02-a599-55ad93259804-serving-cert\") pod \"console-operator-58897d9998-zgvwc\" (UID: \"36d2fb2f-6d99-4e02-a599-55ad93259804\") " pod="openshift-console-operator/console-operator-58897d9998-zgvwc" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.419530 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2a26b929-108c-43a8-a196-eedee3230161-metrics-tls\") pod \"dns-default-92b7k\" (UID: \"2a26b929-108c-43a8-a196-eedee3230161\") " pod="openshift-dns/dns-default-92b7k" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.420656 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/35966a08-49de-4d9a-baa2-0b1b030b2353-profile-collector-cert\") pod \"olm-operator-6b444d44fb-4zldb\" (UID: \"35966a08-49de-4d9a-baa2-0b1b030b2353\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4zldb" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.420759 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/5378035b-02a7-42d4-9c55-91de32b377c0-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-w2ctv\" (UID: \"5378035b-02a7-42d4-9c55-91de32b377c0\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w2ctv" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.421081 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2ddb2a27-e181-4592-b710-69693c5a3efe-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-6l64z\" (UID: \"2ddb2a27-e181-4592-b710-69693c5a3efe\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6l64z" Oct 01 06:19:01 crc kubenswrapper[4747]: W1001 06:19:01.421325 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode9b0ef66_c372_4715_b069_4cdbcaf66f1f.slice/crio-2aa41ee4a51de0b6fdab53b456a5885aff859576e1f0abd75b6ad1c648dd5e83 WatchSource:0}: Error finding container 2aa41ee4a51de0b6fdab53b456a5885aff859576e1f0abd75b6ad1c648dd5e83: Status 404 returned error can't find the container with id 2aa41ee4a51de0b6fdab53b456a5885aff859576e1f0abd75b6ad1c648dd5e83 Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.421377 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/77e07ef9-a0c4-4677-9689-cc571997ecf7-serving-cert\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.421613 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/77e07ef9-a0c4-4677-9689-cc571997ecf7-encryption-config\") pod 
\"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.421683 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/dc3f0a86-ed2e-452b-b829-e2fc65bbac66-cert\") pod \"ingress-canary-xlnlq\" (UID: \"dc3f0a86-ed2e-452b-b829-e2fc65bbac66\") " pod="openshift-ingress-canary/ingress-canary-xlnlq" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.422541 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ae63cbe5-e016-4101-8fe0-72aea96d7977-certs\") pod \"machine-config-server-fs4jt\" (UID: \"ae63cbe5-e016-4101-8fe0-72aea96d7977\") " pod="openshift-machine-config-operator/machine-config-server-fs4jt" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.424991 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9847d204-b75b-41bb-8f4b-03058aeab9fb-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-q6jpz\" (UID: \"9847d204-b75b-41bb-8f4b-03058aeab9fb\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q6jpz" Oct 01 06:19:01 crc kubenswrapper[4747]: W1001 06:19:01.433844 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb55da778_ff99_4064_a60e_ee50f4f4f8e6.slice/crio-4b52a38f358e33b2e39db2dcfe650d6af32076c1184d03fe0002a617d1f6d05c WatchSource:0}: Error finding container 4b52a38f358e33b2e39db2dcfe650d6af32076c1184d03fe0002a617d1f6d05c: Status 404 returned error can't find the container with id 4b52a38f358e33b2e39db2dcfe650d6af32076c1184d03fe0002a617d1f6d05c Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.438378 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5de5adaf-4595-446d-9b77-a48824db2dfa-bound-sa-token\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.440386 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a879ef30-1cb2-4129-8acd-6fce3c6b88e3-serving-cert\") pod \"service-ca-operator-777779d784-jrxnk\" (UID: \"a879ef30-1cb2-4129-8acd-6fce3c6b88e3\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jrxnk" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.458047 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-284zz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.461149 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6hvr\" (UniqueName: \"kubernetes.io/projected/b491ab08-e1e7-4166-b2fc-3d265a06414f-kube-api-access-x6hvr\") pod \"machine-api-operator-5694c8668f-vh9mz\" (UID: \"b491ab08-e1e7-4166-b2fc-3d265a06414f\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vh9mz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.479021 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fms86\" (UniqueName: \"kubernetes.io/projected/492d31e9-e510-4e8b-9042-38ca4be5b283-kube-api-access-fms86\") pod \"machine-config-operator-74547568cd-mmmb9\" (UID: \"492d31e9-e510-4e8b-9042-38ca4be5b283\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mmmb9" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.493260 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-bp7w9"] Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.504507 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmtqb\" (UniqueName: \"kubernetes.io/projected/f8aec98c-b046-465a-b712-0db02e816e4f-kube-api-access-mmtqb\") pod \"machine-config-controller-84d6567774-xccrp\" (UID: \"f8aec98c-b046-465a-b712-0db02e816e4f\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xccrp" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.505153 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: E1001 06:19:01.505568 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:02.005556011 +0000 UTC m=+143.415213060 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.522799 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlwst\" (UniqueName: \"kubernetes.io/projected/eaddcad0-cef4-4919-84c0-3edbc3b2bd6c-kube-api-access-wlwst\") pod \"machine-approver-56656f9798-wvm4n\" (UID: \"eaddcad0-cef4-4919-84c0-3edbc3b2bd6c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wvm4n" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.540083 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjjp5\" (UniqueName: \"kubernetes.io/projected/e5175887-79c8-46f9-9708-d4d9afea026d-kube-api-access-qjjp5\") pod \"downloads-7954f5f757-bxlnn\" (UID: \"e5175887-79c8-46f9-9708-d4d9afea026d\") " pod="openshift-console/downloads-7954f5f757-bxlnn" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.551870 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-bxlnn" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.561917 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drsln\" (UniqueName: \"kubernetes.io/projected/f76f06cb-49af-4c1f-a27f-776b2bac7b33-kube-api-access-drsln\") pod \"ingress-operator-5b745b69d9-pjjgz\" (UID: \"f76f06cb-49af-4c1f-a27f-776b2bac7b33\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pjjgz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.580289 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-vh9mz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.584588 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w858z\" (UniqueName: \"kubernetes.io/projected/da478e7d-66d5-4ab9-9a01-66c1f57d8ef5-kube-api-access-w858z\") pod \"apiserver-76f77b778f-qnv2z\" (UID: \"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5\") " pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.597094 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.600801 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ldwjw"] Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.605893 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:01 crc kubenswrapper[4747]: E1001 06:19:01.606263 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:02.10623317 +0000 UTC m=+143.515890219 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.607361 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.607678 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srdmv\" (UniqueName: \"kubernetes.io/projected/5efb7422-a464-4daf-991f-808ba693495c-kube-api-access-srdmv\") pod \"collect-profiles-29321655-jzmsw\" (UID: \"5efb7422-a464-4daf-991f-808ba693495c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-jzmsw" Oct 01 06:19:01 crc kubenswrapper[4747]: E1001 06:19:01.607711 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:02.107699649 +0000 UTC m=+143.517356698 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.611504 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-86zb4" Oct 01 06:19:01 crc kubenswrapper[4747]: W1001 06:19:01.624890 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod75cbdf07_2be3_4b03_9241_e7e7d7de0f70.slice/crio-040439a1cac91895ecef8161ed7a979a8a6e7cc4516a30c5e02aec673c3263df WatchSource:0}: Error finding container 040439a1cac91895ecef8161ed7a979a8a6e7cc4516a30c5e02aec673c3263df: Status 404 returned error can't find the container with id 040439a1cac91895ecef8161ed7a979a8a6e7cc4516a30c5e02aec673c3263df Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.641610 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jg6f\" (UniqueName: \"kubernetes.io/projected/5de5adaf-4595-446d-9b77-a48824db2dfa-kube-api-access-7jg6f\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.642446 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9wgz7"] Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.645468 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnxgz\" (UniqueName: \"kubernetes.io/projected/37ccee71-1ac4-49a3-bef1-74f2fe4babe7-kube-api-access-xnxgz\") pod \"packageserver-d55dfcdfc-689vg\" (UID: \"37ccee71-1ac4-49a3-bef1-74f2fe4babe7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.649919 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wvm4n" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.664363 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pjjgz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.673013 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67mqq\" (UniqueName: \"kubernetes.io/projected/07aacdeb-d996-4747-a1d8-4803d5f7f4a7-kube-api-access-67mqq\") pod \"migrator-59844c95c7-4kxcx\" (UID: \"07aacdeb-d996-4747-a1d8-4803d5f7f4a7\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-4kxcx" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.684158 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rtkg"] Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.687570 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mmmb9" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.701916 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpf42\" (UniqueName: \"kubernetes.io/projected/5378035b-02a7-42d4-9c55-91de32b377c0-kube-api-access-cpf42\") pod \"control-plane-machine-set-operator-78cbb6b69f-w2ctv\" (UID: \"5378035b-02a7-42d4-9c55-91de32b377c0\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w2ctv" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.708865 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xccrp" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.708955 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:01 crc kubenswrapper[4747]: E1001 06:19:01.709100 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:02.209073487 +0000 UTC m=+143.618730536 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.709144 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: E1001 06:19:01.709468 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:02.209456738 +0000 UTC m=+143.619113777 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.725939 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7lvm\" (UniqueName: \"kubernetes.io/projected/dc3f0a86-ed2e-452b-b829-e2fc65bbac66-kube-api-access-s7lvm\") pod \"ingress-canary-xlnlq\" (UID: \"dc3f0a86-ed2e-452b-b829-e2fc65bbac66\") " pod="openshift-ingress-canary/ingress-canary-xlnlq" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.749667 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.750177 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-jzmsw" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.753125 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-284zz"] Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.761648 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk959\" (UniqueName: \"kubernetes.io/projected/d8ba6078-ec36-41f2-a9c4-01f4cfbce71c-kube-api-access-lk959\") pod \"service-ca-9c57cc56f-kxgrb\" (UID: \"d8ba6078-ec36-41f2-a9c4-01f4cfbce71c\") " pod="openshift-service-ca/service-ca-9c57cc56f-kxgrb" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.770665 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bdc2\" (UniqueName: \"kubernetes.io/projected/2a26b929-108c-43a8-a196-eedee3230161-kube-api-access-6bdc2\") pod \"dns-default-92b7k\" (UID: \"2a26b929-108c-43a8-a196-eedee3230161\") " pod="openshift-dns/dns-default-92b7k" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.771406 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-kxgrb" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.783495 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w2ctv" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.802434 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5sgj\" (UniqueName: \"kubernetes.io/projected/ae63cbe5-e016-4101-8fe0-72aea96d7977-kube-api-access-h5sgj\") pod \"machine-config-server-fs4jt\" (UID: \"ae63cbe5-e016-4101-8fe0-72aea96d7977\") " pod="openshift-machine-config-operator/machine-config-server-fs4jt" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.809193 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxb4k\" (UniqueName: \"kubernetes.io/projected/77e07ef9-a0c4-4677-9689-cc571997ecf7-kube-api-access-wxb4k\") pod \"apiserver-7bbb656c7d-8nbjz\" (UID: \"77e07ef9-a0c4-4677-9689-cc571997ecf7\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.810452 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:01 crc kubenswrapper[4747]: E1001 06:19:01.810792 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:02.310743313 +0000 UTC m=+143.720400362 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.822816 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-4kxcx" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.826223 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r87sf\" (UniqueName: \"kubernetes.io/projected/36d2fb2f-6d99-4e02-a599-55ad93259804-kube-api-access-r87sf\") pod \"console-operator-58897d9998-zgvwc\" (UID: \"36d2fb2f-6d99-4e02-a599-55ad93259804\") " pod="openshift-console-operator/console-operator-58897d9998-zgvwc" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.840649 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-fs4jt" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.842559 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzb24\" (UniqueName: \"kubernetes.io/projected/9847d204-b75b-41bb-8f4b-03058aeab9fb-kube-api-access-vzb24\") pod \"package-server-manager-789f6589d5-q6jpz\" (UID: \"9847d204-b75b-41bb-8f4b-03058aeab9fb\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q6jpz" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.848593 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-xlnlq" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.853537 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-92b7k" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.861488 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tj6nb\" (UniqueName: \"kubernetes.io/projected/35966a08-49de-4d9a-baa2-0b1b030b2353-kube-api-access-tj6nb\") pod \"olm-operator-6b444d44fb-4zldb\" (UID: \"35966a08-49de-4d9a-baa2-0b1b030b2353\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4zldb" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.872133 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.883637 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxgwd\" (UniqueName: \"kubernetes.io/projected/86217928-f132-414b-a374-3e20ea531035-kube-api-access-gxgwd\") pod \"csi-hostpathplugin-48x76\" (UID: \"86217928-f132-414b-a374-3e20ea531035\") " pod="hostpath-provisioner/csi-hostpathplugin-48x76" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.900424 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtbtm\" (UniqueName: \"kubernetes.io/projected/9648d961-f516-4332-9ff8-225f40f0af8f-kube-api-access-jtbtm\") pod \"multus-admission-controller-857f4d67dd-9tnsf\" (UID: \"9648d961-f516-4332-9ff8-225f40f0af8f\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-9tnsf" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.917707 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:01 crc kubenswrapper[4747]: E1001 06:19:01.918261 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:02.418238315 +0000 UTC m=+143.827895444 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.930627 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9tfm\" (UniqueName: \"kubernetes.io/projected/2ddb2a27-e181-4592-b710-69693c5a3efe-kube-api-access-l9tfm\") pod \"kube-storage-version-migrator-operator-b67b599dd-6l64z\" (UID: \"2ddb2a27-e181-4592-b710-69693c5a3efe\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6l64z" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.945355 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4j5v4\" (UniqueName: \"kubernetes.io/projected/a879ef30-1cb2-4129-8acd-6fce3c6b88e3-kube-api-access-4j5v4\") pod \"service-ca-operator-777779d784-jrxnk\" (UID: \"a879ef30-1cb2-4129-8acd-6fce3c6b88e3\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-jrxnk" Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.946095 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-86zb4"] Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 06:19:01.970563 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-vh9mz"] Oct 01 06:19:01 crc kubenswrapper[4747]: I1001 
06:19:01.994983 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s"] Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.015313 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-pjjgz"] Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.025638 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:02 crc kubenswrapper[4747]: E1001 06:19:02.026760 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:02.526728033 +0000 UTC m=+143.936385082 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.055540 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-bxlnn"] Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.072452 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-zgvwc" Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.082548 4747 generic.go:334] "Generic (PLEG): container finished" podID="d4baa33b-4274-46b2-83d4-8e80ad9542c8" containerID="8b94f15a24b1b71d14256cb46e87ae57d25224b0992ce1e6eb952cfb543f38ba" exitCode=0 Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.082615 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bp7w9" event={"ID":"d4baa33b-4274-46b2-83d4-8e80ad9542c8","Type":"ContainerDied","Data":"8b94f15a24b1b71d14256cb46e87ae57d25224b0992ce1e6eb952cfb543f38ba"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.082648 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bp7w9" event={"ID":"d4baa33b-4274-46b2-83d4-8e80ad9542c8","Type":"ContainerStarted","Data":"f99bc667ffdd3b522c511a5bf4694f322b23a757cabe8d8b6d94105db4f9c2f4"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.087511 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q6jpz" Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.093292 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.098712 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-jrxnk" Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.106135 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6l64z" Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.108606 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pghqr" event={"ID":"b55da778-ff99-4064-a60e-ee50f4f4f8e6","Type":"ContainerStarted","Data":"8f2e654ba2bc8a915dbd5ef3a98ecb9933d8d4e4bf659586e3f4c11e7faea0a4"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.108643 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pghqr" event={"ID":"b55da778-ff99-4064-a60e-ee50f4f4f8e6","Type":"ContainerStarted","Data":"4b52a38f358e33b2e39db2dcfe650d6af32076c1184d03fe0002a617d1f6d05c"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.110810 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-csw7c" event={"ID":"2702589d-bd8c-4401-a5d9-2d57c88f33f6","Type":"ContainerStarted","Data":"c3254b19a1957658b03277b7475c66c6730a35f0a5ce1e9fca2b12295fc93a66"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.110831 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-csw7c" event={"ID":"2702589d-bd8c-4401-a5d9-2d57c88f33f6","Type":"ContainerStarted","Data":"f5756239249fb5fa7a19993bab78bcb3a0cb702571d8946135e92033da78712f"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.113652 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-9tnsf" Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.121436 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6v524" event={"ID":"98892cff-c8b2-48af-aa53-a3cc727ecf46","Type":"ContainerStarted","Data":"e5a4eab1c3c5516dd1ee6d543c92e482ca2e63948d6c6f68e493ce3fc8fd74bd"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.121741 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6v524" event={"ID":"98892cff-c8b2-48af-aa53-a3cc727ecf46","Type":"ContainerStarted","Data":"bdba5751a27ae5d60c5b0a02101a918167674818c32e3c7a4133fc27b7b25efb"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.121784 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6v524" event={"ID":"98892cff-c8b2-48af-aa53-a3cc727ecf46","Type":"ContainerStarted","Data":"96cd14643b6d3379e62d27c22c5b5b5eba8a5ae5970e4cde6e8abb5c8c2bc5c3"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.127375 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:02 crc kubenswrapper[4747]: E1001 06:19:02.127669 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:02.627656639 +0000 UTC m=+144.037313688 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.128245 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4zldb" Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.129318 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" event={"ID":"e9b0ef66-c372-4715-b069-4cdbcaf66f1f","Type":"ContainerStarted","Data":"2f62b4ed3df63e1625901b6a7a88c715b1203cfdbdd4ff28d62e3a733b696186"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.129356 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.129369 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" event={"ID":"e9b0ef66-c372-4715-b069-4cdbcaf66f1f","Type":"ContainerStarted","Data":"2aa41ee4a51de0b6fdab53b456a5885aff859576e1f0abd75b6ad1c648dd5e83"} Oct 01 06:19:02 crc kubenswrapper[4747]: W1001 06:19:02.131992 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb491ab08_e1e7_4166_b2fc_3d265a06414f.slice/crio-db598455ee1722f6c1ad41659a59ca9ba0c2a14d8ca2d1f5fbefea336a6aa406 WatchSource:0}: Error finding container db598455ee1722f6c1ad41659a59ca9ba0c2a14d8ca2d1f5fbefea336a6aa406: Status 404 returned error can't find the container with id db598455ee1722f6c1ad41659a59ca9ba0c2a14d8ca2d1f5fbefea336a6aa406 Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.132245 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-kcszh" event={"ID":"e9b3bf47-d12e-437e-8b8b-15fda1a2be69","Type":"ContainerStarted","Data":"aa30dd9cb11cdd4582a51f5dd4f0cd7f8c5a1789730a9d80c94db6ce47811836"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.132276 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-kcszh" event={"ID":"e9b3bf47-d12e-437e-8b8b-15fda1a2be69","Type":"ContainerStarted","Data":"59765fbaabb18a824c9beba917a432e73e915cc3e11b416a6c3da645b2010263"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.136817 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wvm4n" event={"ID":"eaddcad0-cef4-4919-84c0-3edbc3b2bd6c","Type":"ContainerStarted","Data":"3f256a115330a9adf6540d0664433d119449e018617bdfbe8e646a9f704f81b2"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.138138 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rtkg" event={"ID":"c94e27c1-1f36-4b84-9ce0-280fdb611c72","Type":"ContainerStarted","Data":"b0d7e9084fe89f2d5f7f5335adc6e75ed797256ff50ad4dcdb3c63a2562ee40c"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.139290 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" event={"ID":"ecd114c8-714d-447c-a9d6-49be4118f010","Type":"ContainerStarted","Data":"5f4bccf85d068709401a0dca7295d79ee8fccab0863b567b2b99485ebdc0e620"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.139308 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" event={"ID":"ecd114c8-714d-447c-a9d6-49be4118f010","Type":"ContainerStarted","Data":"e186345d488f9f752628505010afc5f2625dcd26478a5971eae424c88490d499"} Oct 01 
06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.146793 4747 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-8jxsb container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.146843 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" podUID="e9b0ef66-c372-4715-b069-4cdbcaf66f1f" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" Oct 01 06:19:02 crc kubenswrapper[4747]: W1001 06:19:02.152370 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod63fd244b_ef08_493d_80e6_1dbf110f5207.slice/crio-28d45b588a2fa3555696d860226e2212f8cce7df3e9ab4ea69e251d91904a73d WatchSource:0}: Error finding container 28d45b588a2fa3555696d860226e2212f8cce7df3e9ab4ea69e251d91904a73d: Status 404 returned error can't find the container with id 28d45b588a2fa3555696d860226e2212f8cce7df3e9ab4ea69e251d91904a73d Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.154166 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-nsd9h" event={"ID":"aa4e6638-81d2-47ff-8bea-beb731e7a905","Type":"ContainerStarted","Data":"639f82bbe35f7e7d0544183ad31a8ac66038b852b7e9f6ca805ec4b9a61eea68"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.154347 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-nsd9h" event={"ID":"aa4e6638-81d2-47ff-8bea-beb731e7a905","Type":"ContainerStarted","Data":"bc8af1ae9246270a7a998540eb8de7352e204d52379349b8fd130d3c15345225"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.168451 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ldwjw" event={"ID":"75cbdf07-2be3-4b03-9241-e7e7d7de0f70","Type":"ContainerStarted","Data":"040439a1cac91895ecef8161ed7a979a8a6e7cc4516a30c5e02aec673c3263df"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.174690 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-48x76" Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.183788 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2ct5k" event={"ID":"5a7d70fb-39d4-4f9c-a40c-b321880d83a7","Type":"ContainerStarted","Data":"e71f06dd09543ab8194778c0698ebaffaf86b0ed8d1808394b717864f0eedfb7"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.183837 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2ct5k" event={"ID":"5a7d70fb-39d4-4f9c-a40c-b321880d83a7","Type":"ContainerStarted","Data":"6aec87108cc05f157dfe4482b34ce5910c9aedb96551ff0b3a30ec957c19929f"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.184298 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2ct5k" Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.185633 4747 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-2ct5k container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.34:8443/healthz\": dial tcp 10.217.0.34:8443: connect: connection refused" start-of-body= Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.185676 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2ct5k" podUID="5a7d70fb-39d4-4f9c-a40c-b321880d83a7" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.34:8443/healthz\": dial tcp 10.217.0.34:8443: connect: connection refused" Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.195564 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9wgz7" event={"ID":"d25ffb7a-458f-4122-a01a-33edd82267cd","Type":"ContainerStarted","Data":"a1c19394b83bc406fa4875f89d2c41c1271e66c8c6f1518edc0ac0746a4316de"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.195617 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w2ctv"] Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.200525 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" event={"ID":"b35796dc-d78b-48ec-be7b-45d072b20fe8","Type":"ContainerStarted","Data":"fa24d3dc2abc747aeef354d36b2df016adceb90dfbc1148cb62f5560f013dfa8"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.200565 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" event={"ID":"b35796dc-d78b-48ec-be7b-45d072b20fe8","Type":"ContainerStarted","Data":"287ad9362953ff9d7d51778d8cd3bf30ad8abff9ef9f095da210950d57ccd96f"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.200894 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.207021 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-5nq8g" event={"ID":"0b633eab-bed0-436d-ad6d-bd7f315dc172","Type":"ContainerStarted","Data":"5323351cf5453d4dac0db566f27d9ea426e4748be9cb3c4ac53f604e94524813"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.207067 4747 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-5nq8g" event={"ID":"0b633eab-bed0-436d-ad6d-bd7f315dc172","Type":"ContainerStarted","Data":"c72efa165132da823374ca90ca4da1a06e7d10e4a9a57df50dcd5f71fe644d69"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.208221 4747 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-q2zvk container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.11:6443/healthz\": dial tcp 10.217.0.11:6443: connect: connection refused" start-of-body= Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.208264 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" podUID="b35796dc-d78b-48ec-be7b-45d072b20fe8" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.11:6443/healthz\": dial tcp 10.217.0.11:6443: connect: connection refused" Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.211322 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-284zz" event={"ID":"9d504980-0efe-4f16-b3ec-a94e4c0e0384","Type":"ContainerStarted","Data":"05671cf6609bc4d2069365435a17aad42019f2eeffd56986af585ff0ad11bc27"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.214509 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z4bz4" event={"ID":"e63497a4-0ba8-48a5-88af-dc20ef283130","Type":"ContainerStarted","Data":"7e420457633f975a0813e352197607f6f414be3867c03a3a2a5d7057b5e3c618"} Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.214532 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z4bz4" event={"ID":"e63497a4-0ba8-48a5-88af-dc20ef283130","Type":"ContainerStarted","Data":"c43236f5bba3eb720fed41c626864bbe266ea66e19b7d29f9842ce2667cd4ee1"} Oct 01 06:19:02 crc kubenswrapper[4747]: W1001 06:19:02.219759 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf76f06cb_49af_4c1f_a27f_776b2bac7b33.slice/crio-0c2fa7a41b0187b5f5db57facc154f23174009219f28e584c5aae1bd5d722203 WatchSource:0}: Error finding container 0c2fa7a41b0187b5f5db57facc154f23174009219f28e584c5aae1bd5d722203: Status 404 returned error can't find the container with id 0c2fa7a41b0187b5f5db57facc154f23174009219f28e584c5aae1bd5d722203 Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.228855 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:02 crc kubenswrapper[4747]: E1001 06:19:02.231093 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:02.731070822 +0000 UTC m=+144.140727871 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:02 crc kubenswrapper[4747]: W1001 06:19:02.235327 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode5175887_79c8_46f9_9708_d4d9afea026d.slice/crio-3021c655fa7cf3a2c9c23774e394c06a4935e143a1d1c4c1799067d9c8c5b664 WatchSource:0}: Error finding container 3021c655fa7cf3a2c9c23774e394c06a4935e143a1d1c4c1799067d9c8c5b664: Status 404 returned error can't find the container with id 3021c655fa7cf3a2c9c23774e394c06a4935e143a1d1c4c1799067d9c8c5b664 Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.332003 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:02 crc kubenswrapper[4747]: E1001 06:19:02.339232 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:02.839218471 +0000 UTC m=+144.248875520 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.434609 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:02 crc kubenswrapper[4747]: E1001 06:19:02.435214 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:02.93501369 +0000 UTC m=+144.344670739 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.458976 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-xccrp"] Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.506876 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-kxgrb"] Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.513805 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-mmmb9"] Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.513887 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg"] Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.545408 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:02 crc kubenswrapper[4747]: E1001 06:19:02.545803 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:03.045786769 +0000 UTC m=+144.455443818 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.568791 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" podStartSLOduration=123.568769755 podStartE2EDuration="2m3.568769755s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:02.543041816 +0000 UTC m=+143.952698865" watchObservedRunningTime="2025-10-01 06:19:02.568769755 +0000 UTC m=+143.978426804" Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.570464 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-4kxcx"] Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.578072 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321655-jzmsw"] Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.590356 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-wk2wx" podStartSLOduration=123.590340124 podStartE2EDuration="2m3.590340124s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:02.588735161 +0000 UTC m=+143.998392210" watchObservedRunningTime="2025-10-01 06:19:02.590340124 +0000 UTC m=+143.999997173" Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.657627 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:02 crc kubenswrapper[4747]: E1001 06:19:02.658287 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:03.158261634 +0000 UTC m=+144.567918683 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.669961 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2ct5k" podStartSLOduration=122.669947548 podStartE2EDuration="2m2.669947548s" podCreationTimestamp="2025-10-01 06:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:02.643679214 +0000 UTC m=+144.053336263" watchObservedRunningTime="2025-10-01 06:19:02.669947548 +0000 UTC m=+144.079604597" Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.694476 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-pghqr" podStartSLOduration=123.694460675 podStartE2EDuration="2m3.694460675s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:02.693287234 +0000 UTC m=+144.102944283" watchObservedRunningTime="2025-10-01 06:19:02.694460675 +0000 UTC m=+144.104117724" Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.759259 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:02 crc kubenswrapper[4747]: E1001 06:19:02.759648 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:03.259633743 +0000 UTC m=+144.669290792 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.759800 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-92b7k"] Oct 01 06:19:02 crc kubenswrapper[4747]: W1001 06:19:02.811344 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd8ba6078_ec36_41f2_a9c4_01f4cfbce71c.slice/crio-da03bfc867fdbba7176ce96824a4c5517f702a94754cfd55c5beba6bea805f37 WatchSource:0}: Error finding container da03bfc867fdbba7176ce96824a4c5517f702a94754cfd55c5beba6bea805f37: Status 404 returned error can't find the container with id da03bfc867fdbba7176ce96824a4c5517f702a94754cfd55c5beba6bea805f37 Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.826564 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-xlnlq"] Oct 01 06:19:02 crc kubenswrapper[4747]: W1001 06:19:02.847544 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod07aacdeb_d996_4747_a1d8_4803d5f7f4a7.slice/crio-0391b8aaea9d39f5703839830d806a25d02819ba486d32986bf11bcaaf5734ae WatchSource:0}: Error finding container 0391b8aaea9d39f5703839830d806a25d02819ba486d32986bf11bcaaf5734ae: Status 404 returned error can't find the container with id 0391b8aaea9d39f5703839830d806a25d02819ba486d32986bf11bcaaf5734ae Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.863347 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:02 crc kubenswrapper[4747]: E1001 06:19:02.863679 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:03.363648481 +0000 UTC m=+144.773305530 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.864689 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:02 crc kubenswrapper[4747]: E1001 06:19:02.865051 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:03.365039019 +0000 UTC m=+144.774696068 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.886867 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-csw7c" podStartSLOduration=123.886850744 podStartE2EDuration="2m3.886850744s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:02.885838176 +0000 UTC m=+144.295495225" watchObservedRunningTime="2025-10-01 06:19:02.886850744 +0000 UTC m=+144.296507793" Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.966497 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:02 crc kubenswrapper[4747]: E1001 06:19:02.966784 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:03.466770556 +0000 UTC m=+144.876427595 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:02 crc kubenswrapper[4747]: I1001 06:19:02.978008 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qnv2z"] Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.068197 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:03 crc kubenswrapper[4747]: E1001 06:19:03.068895 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:03.568884304 +0000 UTC m=+144.978541353 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.104048 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-5nq8g" Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.148119 4747 patch_prober.go:28] interesting pod/router-default-5444994796-5nq8g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 06:19:03 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Oct 01 06:19:03 crc kubenswrapper[4747]: [+]process-running ok Oct 01 06:19:03 crc kubenswrapper[4747]: healthz check failed Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.148202 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5nq8g" podUID="0b633eab-bed0-436d-ad6d-bd7f315dc172" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.177521 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:03 crc kubenswrapper[4747]: E1001 06:19:03.177875 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:03.677718121 +0000 UTC m=+145.087375170 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.193380 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:03 crc kubenswrapper[4747]: E1001 06:19:03.194124 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:03.69408208 +0000 UTC m=+145.103739129 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.209812 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-z4bz4" podStartSLOduration=124.209783551 podStartE2EDuration="2m4.209783551s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:03.209582786 +0000 UTC m=+144.619239835" watchObservedRunningTime="2025-10-01 06:19:03.209783551 +0000 UTC m=+144.619440600" Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.287181 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-xlnlq" event={"ID":"dc3f0a86-ed2e-452b-b829-e2fc65bbac66","Type":"ContainerStarted","Data":"8eff867d849d8811eb66b82a36f6a182813885465c8c9437c12a5e484461b7cb"} Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.328494 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:03 crc kubenswrapper[4747]: E1001 06:19:03.328781 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-01 06:19:03.82873766 +0000 UTC m=+145.238394709 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.329376 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:03 crc kubenswrapper[4747]: E1001 06:19:03.329946 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:03.829927882 +0000 UTC m=+145.239584931 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.383364 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-nsd9h" podStartSLOduration=124.383296033 podStartE2EDuration="2m4.383296033s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:03.349061845 +0000 UTC m=+144.758718924" watchObservedRunningTime="2025-10-01 06:19:03.383296033 +0000 UTC m=+144.792953082" Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.392250 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-4kxcx" event={"ID":"07aacdeb-d996-4747-a1d8-4803d5f7f4a7","Type":"ContainerStarted","Data":"0391b8aaea9d39f5703839830d806a25d02819ba486d32986bf11bcaaf5734ae"} Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.437275 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:03 crc kubenswrapper[4747]: E1001 06:19:03.469699 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-10-01 06:19:03.969661788 +0000 UTC m=+145.379318837 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.477395 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w2ctv" event={"ID":"5378035b-02a7-42d4-9c55-91de32b377c0","Type":"ContainerStarted","Data":"0825afef2cb5ef5844c408f4c7b2b4615b9d3b63387c841de5264ef4d57acae5"} Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.480389 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-5nq8g" podStartSLOduration=124.480355495 podStartE2EDuration="2m4.480355495s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:03.435283106 +0000 UTC m=+144.844940165" watchObservedRunningTime="2025-10-01 06:19:03.480355495 +0000 UTC m=+144.890012544" Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.507994 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q6jpz"] Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.510589 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-zgvwc"] Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.517479 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-vh9mz" event={"ID":"b491ab08-e1e7-4166-b2fc-3d265a06414f","Type":"ContainerStarted","Data":"db598455ee1722f6c1ad41659a59ca9ba0c2a14d8ca2d1f5fbefea336a6aa406"} Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.518797 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rtkg" event={"ID":"c94e27c1-1f36-4b84-9ce0-280fdb611c72","Type":"ContainerStarted","Data":"4e1bf839a26c8c8649815e57a0d269baa47ec78f2be78af8f4307906a65c3998"} Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.530247 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pjjgz" event={"ID":"f76f06cb-49af-4c1f-a27f-776b2bac7b33","Type":"ContainerStarted","Data":"0c2fa7a41b0187b5f5db57facc154f23174009219f28e584c5aae1bd5d722203"} Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.539480 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:03 crc kubenswrapper[4747]: E1001 06:19:03.540598 4747 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:04.040585569 +0000 UTC m=+145.450242618 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.551356 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-jzmsw" event={"ID":"5efb7422-a464-4daf-991f-808ba693495c","Type":"ContainerStarted","Data":"312badfd7ab7dd6b89eccd90295d8152046c5168297298d9d560773ee8177cad"} Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.552476 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-fs4jt" event={"ID":"ae63cbe5-e016-4101-8fe0-72aea96d7977","Type":"ContainerStarted","Data":"588f8172b3018750b08e7d5848cddc16861258750ebce839728b74b3ba18a4fd"} Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.559611 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4zldb"] Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.580063 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-48x76"] Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.580886 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wvm4n" event={"ID":"eaddcad0-cef4-4919-84c0-3edbc3b2bd6c","Type":"ContainerStarted","Data":"faffeb6d7e35ba05b04a5ccacf57e5e3162a5665834f3309a8359eb4c5608b15"} Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.587644 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6v524" podStartSLOduration=124.587617821 podStartE2EDuration="2m4.587617821s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:03.523956284 +0000 UTC m=+144.933613333" watchObservedRunningTime="2025-10-01 06:19:03.587617821 +0000 UTC m=+144.997274870" Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.600081 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6l64z"] Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.605945 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-284zz" Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.605985 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-284zz" event={"ID":"9d504980-0efe-4f16-b3ec-a94e4c0e0384","Type":"ContainerStarted","Data":"fa3cb1ff8388b1f6b946d714bfa9408fe90e9277860d133a1f050bd99c25ad2d"} Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.607548 4747 patch_prober.go:28] 
interesting pod/marketplace-operator-79b997595-284zz container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body= Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.607691 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-284zz" podUID="9d504980-0efe-4f16-b3ec-a94e4c0e0384" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: connect: connection refused" Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.615115 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-92b7k" event={"ID":"2a26b929-108c-43a8-a196-eedee3230161","Type":"ContainerStarted","Data":"cda7a73e14958f9c6ca2df5811960f7073fc007df3231226dbe60bb18c247445"} Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.615175 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz"] Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.616488 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg" event={"ID":"37ccee71-1ac4-49a3-bef1-74f2fe4babe7","Type":"ContainerStarted","Data":"6c798f686d374d81553b7e2a077e5c624ec6bab7c9a02c1a4f1528570d48cb3f"} Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.617787 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ldwjw" event={"ID":"75cbdf07-2be3-4b03-9241-e7e7d7de0f70","Type":"ContainerStarted","Data":"f812984c6d581e2cab7419447991d1d78ce8162d90679f1994f90f675ac12353"} Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.624087 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-kxgrb" event={"ID":"d8ba6078-ec36-41f2-a9c4-01f4cfbce71c","Type":"ContainerStarted","Data":"da03bfc867fdbba7176ce96824a4c5517f702a94754cfd55c5beba6bea805f37"} Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.635054 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" podStartSLOduration=124.635028842 podStartE2EDuration="2m4.635028842s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:03.594350491 +0000 UTC m=+145.004007540" watchObservedRunningTime="2025-10-01 06:19:03.635028842 +0000 UTC m=+145.044685891" Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.641063 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.641323 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-jrxnk"] Oct 01 06:19:03 crc kubenswrapper[4747]: E1001 06:19:03.641411 4747 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:04.141395862 +0000 UTC m=+145.551052911 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.641447 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:03 crc kubenswrapper[4747]: E1001 06:19:03.642302 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:04.142294886 +0000 UTC m=+145.551951935 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.655530 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-86zb4" event={"ID":"63fd244b-ef08-493d-80e6-1dbf110f5207","Type":"ContainerStarted","Data":"28d45b588a2fa3555696d860226e2212f8cce7df3e9ab4ea69e251d91904a73d"} Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.675448 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-bxlnn" event={"ID":"e5175887-79c8-46f9-9708-d4d9afea026d","Type":"ContainerStarted","Data":"3021c655fa7cf3a2c9c23774e394c06a4935e143a1d1c4c1799067d9c8c5b664"} Oct 01 06:19:03 crc kubenswrapper[4747]: W1001 06:19:03.711863 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod86217928_f132_414b_a374_3e20ea531035.slice/crio-0226bd98491e485022a4b8cda29e6ec5a32aebaaff0a41ec0235f6e9728903df WatchSource:0}: Error finding container 0226bd98491e485022a4b8cda29e6ec5a32aebaaff0a41ec0235f6e9728903df: Status 404 returned error can't find the container with id 0226bd98491e485022a4b8cda29e6ec5a32aebaaff0a41ec0235f6e9728903df Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.712257 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xccrp" 
event={"ID":"f8aec98c-b046-465a-b712-0db02e816e4f","Type":"ContainerStarted","Data":"a962383648a502146eb00591ec9ab2d960497a72d94105d88b5538f7d37153df"} Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.722498 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" event={"ID":"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5","Type":"ContainerStarted","Data":"646c59b2538142ba8a26c8a28ab011552fa4c438a31a1bc5a6bde36517a99610"} Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.731355 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mmmb9" event={"ID":"492d31e9-e510-4e8b-9042-38ca4be5b283","Type":"ContainerStarted","Data":"ef3d5a1f0e32622224bcf4e64a364ee2d3597e060bc49961e52465ab9d1e2f0b"} Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.738721 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9wgz7" event={"ID":"d25ffb7a-458f-4122-a01a-33edd82267cd","Type":"ContainerStarted","Data":"68a4281a9820434dd10cad1a077fcc1afaffa4f0def37753efe9067b8345671d"} Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.748655 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.753283 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" Oct 01 06:19:03 crc kubenswrapper[4747]: E1001 06:19:03.754630 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:04.254605197 +0000 UTC m=+145.664262246 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.764820 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.762023 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-9tnsf"] Oct 01 06:19:03 crc kubenswrapper[4747]: E1001 06:19:03.765288 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-01 06:19:04.265276634 +0000 UTC m=+145.674933683 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.778087 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2ct5k" Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.885600 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:03 crc kubenswrapper[4747]: E1001 06:19:03.887173 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:04.387148101 +0000 UTC m=+145.796805150 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.957939 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-284zz" podStartSLOduration=123.957920209 podStartE2EDuration="2m3.957920209s" podCreationTimestamp="2025-10-01 06:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:03.952712179 +0000 UTC m=+145.362369228" watchObservedRunningTime="2025-10-01 06:19:03.957920209 +0000 UTC m=+145.367577258" Oct 01 06:19:03 crc kubenswrapper[4747]: I1001 06:19:03.994739 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:03 crc kubenswrapper[4747]: E1001 06:19:03.995420 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:04.495409073 +0000 UTC m=+145.905066122 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.090207 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-ldwjw" podStartSLOduration=125.090191575 podStartE2EDuration="2m5.090191575s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:04.041882489 +0000 UTC m=+145.451539568" watchObservedRunningTime="2025-10-01 06:19:04.090191575 +0000 UTC m=+145.499848624" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.098772 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:04 crc kubenswrapper[4747]: E1001 06:19:04.099113 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:04.599100103 +0000 UTC m=+146.008757142 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.127949 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-899r6"] Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.130245 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-899r6" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.136848 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.137154 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2rtkg" podStartSLOduration=125.137140674 podStartE2EDuration="2m5.137140674s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:04.135734886 +0000 UTC m=+145.545391935" watchObservedRunningTime="2025-10-01 06:19:04.137140674 +0000 UTC m=+145.546797723" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.149249 4747 patch_prober.go:28] interesting pod/router-default-5444994796-5nq8g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 06:19:04 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Oct 01 06:19:04 crc kubenswrapper[4747]: [+]process-running ok Oct 01 06:19:04 crc kubenswrapper[4747]: healthz check failed Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.149321 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5nq8g" podUID="0b633eab-bed0-436d-ad6d-bd7f315dc172" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.157723 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-899r6"] Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.199847 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5nhk\" (UniqueName: \"kubernetes.io/projected/fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e-kube-api-access-k5nhk\") pod \"certified-operators-899r6\" (UID: \"fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e\") " pod="openshift-marketplace/certified-operators-899r6" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.199892 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.199932 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e-catalog-content\") pod \"certified-operators-899r6\" (UID: \"fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e\") " pod="openshift-marketplace/certified-operators-899r6" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.199979 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e-utilities\") pod \"certified-operators-899r6\" (UID: \"fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e\") " 
pod="openshift-marketplace/certified-operators-899r6" Oct 01 06:19:04 crc kubenswrapper[4747]: E1001 06:19:04.200702 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:04.700685697 +0000 UTC m=+146.110342746 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.240893 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-kcszh" podStartSLOduration=125.240860855 podStartE2EDuration="2m5.240860855s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:04.185080969 +0000 UTC m=+145.594738018" watchObservedRunningTime="2025-10-01 06:19:04.240860855 +0000 UTC m=+145.650517914" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.243597 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.298484 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-9wgz7" podStartSLOduration=125.298468149 podStartE2EDuration="2m5.298468149s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:04.297151443 +0000 UTC m=+145.706808482" watchObservedRunningTime="2025-10-01 06:19:04.298468149 +0000 UTC m=+145.708125198" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.310670 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.310946 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e-utilities\") pod \"certified-operators-899r6\" (UID: \"fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e\") " pod="openshift-marketplace/certified-operators-899r6" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.311031 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5nhk\" (UniqueName: \"kubernetes.io/projected/fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e-kube-api-access-k5nhk\") pod \"certified-operators-899r6\" (UID: \"fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e\") " pod="openshift-marketplace/certified-operators-899r6" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.311100 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e-catalog-content\") pod \"certified-operators-899r6\" (UID: \"fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e\") " pod="openshift-marketplace/certified-operators-899r6" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.312635 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e-catalog-content\") pod \"certified-operators-899r6\" (UID: \"fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e\") " pod="openshift-marketplace/certified-operators-899r6" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.313926 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e-utilities\") pod \"certified-operators-899r6\" (UID: \"fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e\") " pod="openshift-marketplace/certified-operators-899r6" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.325832 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-w8ltr"] Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.326738 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w8ltr" Oct 01 06:19:04 crc kubenswrapper[4747]: E1001 06:19:04.327216 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:04.812730771 +0000 UTC m=+146.222387820 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.335908 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.353518 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-w8ltr"] Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.381731 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5nhk\" (UniqueName: \"kubernetes.io/projected/fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e-kube-api-access-k5nhk\") pod \"certified-operators-899r6\" (UID: \"fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e\") " pod="openshift-marketplace/certified-operators-899r6" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.414845 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xc4cs\" (UniqueName: \"kubernetes.io/projected/8e6ee5ef-246c-41dd-a003-d590e1e1119b-kube-api-access-xc4cs\") pod \"community-operators-w8ltr\" (UID: \"8e6ee5ef-246c-41dd-a003-d590e1e1119b\") " pod="openshift-marketplace/community-operators-w8ltr" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.414905 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.414948 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e6ee5ef-246c-41dd-a003-d590e1e1119b-utilities\") pod \"community-operators-w8ltr\" (UID: \"8e6ee5ef-246c-41dd-a003-d590e1e1119b\") " pod="openshift-marketplace/community-operators-w8ltr" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.415022 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e6ee5ef-246c-41dd-a003-d590e1e1119b-catalog-content\") pod \"community-operators-w8ltr\" (UID: \"8e6ee5ef-246c-41dd-a003-d590e1e1119b\") " pod="openshift-marketplace/community-operators-w8ltr" Oct 01 06:19:04 crc kubenswrapper[4747]: E1001 06:19:04.415439 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:04.915424154 +0000 UTC m=+146.325081203 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.499843 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-899r6" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.517078 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.517544 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e6ee5ef-246c-41dd-a003-d590e1e1119b-utilities\") pod \"community-operators-w8ltr\" (UID: \"8e6ee5ef-246c-41dd-a003-d590e1e1119b\") " pod="openshift-marketplace/community-operators-w8ltr" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.517619 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e6ee5ef-246c-41dd-a003-d590e1e1119b-catalog-content\") pod \"community-operators-w8ltr\" (UID: \"8e6ee5ef-246c-41dd-a003-d590e1e1119b\") " pod="openshift-marketplace/community-operators-w8ltr" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.517657 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xc4cs\" (UniqueName: \"kubernetes.io/projected/8e6ee5ef-246c-41dd-a003-d590e1e1119b-kube-api-access-xc4cs\") pod \"community-operators-w8ltr\" (UID: \"8e6ee5ef-246c-41dd-a003-d590e1e1119b\") " pod="openshift-marketplace/community-operators-w8ltr" Oct 01 06:19:04 crc kubenswrapper[4747]: E1001 06:19:04.518096 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:05.018080447 +0000 UTC m=+146.427737496 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.518480 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e6ee5ef-246c-41dd-a003-d590e1e1119b-utilities\") pod \"community-operators-w8ltr\" (UID: \"8e6ee5ef-246c-41dd-a003-d590e1e1119b\") " pod="openshift-marketplace/community-operators-w8ltr" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.518703 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e6ee5ef-246c-41dd-a003-d590e1e1119b-catalog-content\") pod \"community-operators-w8ltr\" (UID: \"8e6ee5ef-246c-41dd-a003-d590e1e1119b\") " pod="openshift-marketplace/community-operators-w8ltr" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.531056 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-ss76n"] Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.532012 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ss76n" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.603541 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xc4cs\" (UniqueName: \"kubernetes.io/projected/8e6ee5ef-246c-41dd-a003-d590e1e1119b-kube-api-access-xc4cs\") pod \"community-operators-w8ltr\" (UID: \"8e6ee5ef-246c-41dd-a003-d590e1e1119b\") " pod="openshift-marketplace/community-operators-w8ltr" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.605874 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ss76n"] Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.626930 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b59cfffc-7d07-436d-bd34-6c7e3aa0da3c-catalog-content\") pod \"certified-operators-ss76n\" (UID: \"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c\") " pod="openshift-marketplace/certified-operators-ss76n" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.627166 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzhmj\" (UniqueName: \"kubernetes.io/projected/b59cfffc-7d07-436d-bd34-6c7e3aa0da3c-kube-api-access-fzhmj\") pod \"certified-operators-ss76n\" (UID: \"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c\") " pod="openshift-marketplace/certified-operators-ss76n" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.634415 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b59cfffc-7d07-436d-bd34-6c7e3aa0da3c-utilities\") pod \"certified-operators-ss76n\" (UID: \"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c\") " pod="openshift-marketplace/certified-operators-ss76n" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.634526 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:04 crc kubenswrapper[4747]: E1001 06:19:04.634955 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:05.134942189 +0000 UTC m=+146.544599238 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.712743 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hnkg5"] Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.714033 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hnkg5" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.714791 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w8ltr" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.733229 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hnkg5"] Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.736819 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.737008 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b59cfffc-7d07-436d-bd34-6c7e3aa0da3c-catalog-content\") pod \"certified-operators-ss76n\" (UID: \"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c\") " pod="openshift-marketplace/certified-operators-ss76n" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.737076 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzhmj\" (UniqueName: \"kubernetes.io/projected/b59cfffc-7d07-436d-bd34-6c7e3aa0da3c-kube-api-access-fzhmj\") pod \"certified-operators-ss76n\" (UID: \"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c\") " pod="openshift-marketplace/certified-operators-ss76n" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.737105 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b59cfffc-7d07-436d-bd34-6c7e3aa0da3c-utilities\") pod \"certified-operators-ss76n\" (UID: \"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c\") " pod="openshift-marketplace/certified-operators-ss76n" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.737473 4747 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b59cfffc-7d07-436d-bd34-6c7e3aa0da3c-utilities\") pod \"certified-operators-ss76n\" (UID: \"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c\") " pod="openshift-marketplace/certified-operators-ss76n" Oct 01 06:19:04 crc kubenswrapper[4747]: E1001 06:19:04.737541 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:05.237527569 +0000 UTC m=+146.647184618 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.737727 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b59cfffc-7d07-436d-bd34-6c7e3aa0da3c-catalog-content\") pod \"certified-operators-ss76n\" (UID: \"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c\") " pod="openshift-marketplace/certified-operators-ss76n" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.788034 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzhmj\" (UniqueName: \"kubernetes.io/projected/b59cfffc-7d07-436d-bd34-6c7e3aa0da3c-kube-api-access-fzhmj\") pod \"certified-operators-ss76n\" (UID: \"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c\") " pod="openshift-marketplace/certified-operators-ss76n" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.842495 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.842539 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c-utilities\") pod \"community-operators-hnkg5\" (UID: \"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c\") " pod="openshift-marketplace/community-operators-hnkg5" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.842575 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c-catalog-content\") pod \"community-operators-hnkg5\" (UID: \"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c\") " pod="openshift-marketplace/community-operators-hnkg5" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.842624 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwb6w\" (UniqueName: \"kubernetes.io/projected/ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c-kube-api-access-hwb6w\") pod \"community-operators-hnkg5\" (UID: 
\"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c\") " pod="openshift-marketplace/community-operators-hnkg5" Oct 01 06:19:04 crc kubenswrapper[4747]: E1001 06:19:04.842910 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:05.342891995 +0000 UTC m=+146.752549044 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.895902 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-vh9mz" event={"ID":"b491ab08-e1e7-4166-b2fc-3d265a06414f","Type":"ContainerStarted","Data":"78f051e434a9881683f9381df7dabf961d709b88b18180fd2da27bcb2f99986e"} Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.895965 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-vh9mz" event={"ID":"b491ab08-e1e7-4166-b2fc-3d265a06414f","Type":"ContainerStarted","Data":"9a9a6d6d1645f31c99ecdab9c71c4f62f6773888bf74dd3339fbb8085222cad2"} Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.898664 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-48x76" event={"ID":"86217928-f132-414b-a374-3e20ea531035","Type":"ContainerStarted","Data":"0226bd98491e485022a4b8cda29e6ec5a32aebaaff0a41ec0235f6e9728903df"} Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.901547 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-jrxnk" event={"ID":"a879ef30-1cb2-4129-8acd-6fce3c6b88e3","Type":"ContainerStarted","Data":"c32ae7a3ad91f1af9d4247d1b26816f2ccc003333f874e865408100f571f1727"} Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.929156 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-ss76n" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.943659 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.943973 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c-utilities\") pod \"community-operators-hnkg5\" (UID: \"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c\") " pod="openshift-marketplace/community-operators-hnkg5" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.944012 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c-catalog-content\") pod \"community-operators-hnkg5\" (UID: \"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c\") " pod="openshift-marketplace/community-operators-hnkg5" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.944054 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwb6w\" (UniqueName: \"kubernetes.io/projected/ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c-kube-api-access-hwb6w\") pod \"community-operators-hnkg5\" (UID: \"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c\") " pod="openshift-marketplace/community-operators-hnkg5" Oct 01 06:19:04 crc kubenswrapper[4747]: E1001 06:19:04.944435 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:05.444420326 +0000 UTC m=+146.854077376 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.944807 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c-utilities\") pod \"community-operators-hnkg5\" (UID: \"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c\") " pod="openshift-marketplace/community-operators-hnkg5" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.945022 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c-catalog-content\") pod \"community-operators-hnkg5\" (UID: \"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c\") " pod="openshift-marketplace/community-operators-hnkg5" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.946419 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-vh9mz" podStartSLOduration=124.94639129 podStartE2EDuration="2m4.94639129s" podCreationTimestamp="2025-10-01 06:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:04.934523001 +0000 UTC m=+146.344180040" watchObservedRunningTime="2025-10-01 06:19:04.94639129 +0000 UTC m=+146.356048339" Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.987263 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-zgvwc" event={"ID":"36d2fb2f-6d99-4e02-a599-55ad93259804","Type":"ContainerStarted","Data":"2bfea7c7fbd50ca3f664b013a12cd0e5b4a54c0def7093f31d9040440568543a"} Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.987301 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-zgvwc" event={"ID":"36d2fb2f-6d99-4e02-a599-55ad93259804","Type":"ContainerStarted","Data":"2ade0007f6030b7711eed1e32daad6ca1fde91730f9f31b1b957009dcc491c34"} Oct 01 06:19:04 crc kubenswrapper[4747]: I1001 06:19:04.995346 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-zgvwc" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.012822 4747 patch_prober.go:28] interesting pod/console-operator-58897d9998-zgvwc container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.22:8443/readyz\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body= Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.012876 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-zgvwc" podUID="36d2fb2f-6d99-4e02-a599-55ad93259804" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.22:8443/readyz\": dial tcp 10.217.0.22:8443: connect: connection refused" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.024426 4747 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-jrxnk" podStartSLOduration=125.024398421 podStartE2EDuration="2m5.024398421s" podCreationTimestamp="2025-10-01 06:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:05.01318797 +0000 UTC m=+146.422845019" watchObservedRunningTime="2025-10-01 06:19:05.024398421 +0000 UTC m=+146.434055470" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.043219 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q6jpz" event={"ID":"9847d204-b75b-41bb-8f4b-03058aeab9fb","Type":"ContainerStarted","Data":"87db97c1bfd2bfd3a08d810490fcf49c299193846b5c95306daff887a093c711"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.043262 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q6jpz" event={"ID":"9847d204-b75b-41bb-8f4b-03058aeab9fb","Type":"ContainerStarted","Data":"a0cd1ccfe1075a0b3d044b20e7c7797b15fa6c7378a0d8b54bdf3068c74e53f6"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.046087 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.050082 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwb6w\" (UniqueName: \"kubernetes.io/projected/ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c-kube-api-access-hwb6w\") pod \"community-operators-hnkg5\" (UID: \"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c\") " pod="openshift-marketplace/community-operators-hnkg5" Oct 01 06:19:05 crc kubenswrapper[4747]: E1001 06:19:05.051606 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:05.551582 +0000 UTC m=+146.961239049 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.086553 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-fs4jt" event={"ID":"ae63cbe5-e016-4101-8fe0-72aea96d7977","Type":"ContainerStarted","Data":"46b4548eecca34ee7298a4d85c31ebd773062dc8aee721592b7b811602c42f40"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.087367 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hnkg5" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.102013 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w2ctv" event={"ID":"5378035b-02a7-42d4-9c55-91de32b377c0","Type":"ContainerStarted","Data":"92c4bf66129ce70f824f2167be3e7e10c11d44b5431461f93d2bf4e7bdaba7df"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.110369 4747 patch_prober.go:28] interesting pod/router-default-5444994796-5nq8g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 06:19:05 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Oct 01 06:19:05 crc kubenswrapper[4747]: [+]process-running ok Oct 01 06:19:05 crc kubenswrapper[4747]: healthz check failed Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.110433 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5nq8g" podUID="0b633eab-bed0-436d-ad6d-bd7f315dc172" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.149768 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:05 crc kubenswrapper[4747]: E1001 06:19:05.150902 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:05.650872032 +0000 UTC m=+147.060529301 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.173030 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xccrp" event={"ID":"f8aec98c-b046-465a-b712-0db02e816e4f","Type":"ContainerStarted","Data":"606d4bc4cef47e19b90869188cf507337d7a4e2dead92a45110c7f3f644f3bcc"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.173087 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xccrp" event={"ID":"f8aec98c-b046-465a-b712-0db02e816e4f","Type":"ContainerStarted","Data":"c79e737b20a898f1265322e9d283f0376a4dde7e3698be847035ebb4e7d4a8ae"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.176239 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-zgvwc" podStartSLOduration=126.176224982 podStartE2EDuration="2m6.176224982s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:05.145298053 +0000 UTC m=+146.554955092" watchObservedRunningTime="2025-10-01 06:19:05.176224982 +0000 UTC m=+146.585882031" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.176575 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-fs4jt" podStartSLOduration=7.176570961 podStartE2EDuration="7.176570961s" podCreationTimestamp="2025-10-01 06:18:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:05.173906089 +0000 UTC m=+146.583563138" watchObservedRunningTime="2025-10-01 06:19:05.176570961 +0000 UTC m=+146.586228010" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.216698 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" event={"ID":"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5","Type":"ContainerStarted","Data":"7e18c323293e3d00441759d43378d436345e02717647aad43e6c62de9a7af647"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.217250 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.233767 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-w2ctv" podStartSLOduration=125.233721513 podStartE2EDuration="2m5.233721513s" podCreationTimestamp="2025-10-01 06:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:05.217699543 +0000 UTC m=+146.627356592" watchObservedRunningTime="2025-10-01 06:19:05.233721513 +0000 UTC m=+146.643378562" Oct 01 06:19:05 crc 
kubenswrapper[4747]: I1001 06:19:05.249797 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" podStartSLOduration=125.249770343 podStartE2EDuration="2m5.249770343s" podCreationTimestamp="2025-10-01 06:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:05.248457688 +0000 UTC m=+146.658114737" watchObservedRunningTime="2025-10-01 06:19:05.249770343 +0000 UTC m=+146.659427392" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.252377 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:05 crc kubenswrapper[4747]: E1001 06:19:05.254247 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:05.754228602 +0000 UTC m=+147.163885651 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.307287 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-xccrp" podStartSLOduration=125.307250924 podStartE2EDuration="2m5.307250924s" podCreationTimestamp="2025-10-01 06:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:05.279962693 +0000 UTC m=+146.689619762" watchObservedRunningTime="2025-10-01 06:19:05.307250924 +0000 UTC m=+146.716907973" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.339436 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bp7w9" event={"ID":"d4baa33b-4274-46b2-83d4-8e80ad9542c8","Type":"ContainerStarted","Data":"61e8dd31c6a5242c6f71b6433cbadbfabbe9cfb9459bd495d87563f9b0f7b122"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.339523 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bp7w9" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.339854 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pjjgz" event={"ID":"f76f06cb-49af-4c1f-a27f-776b2bac7b33","Type":"ContainerStarted","Data":"5bfef27402df2ebc3dce3e9973827096154ebfcb8d6ba916eb897c9343abaa49"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.339925 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pjjgz" 
event={"ID":"f76f06cb-49af-4c1f-a27f-776b2bac7b33","Type":"ContainerStarted","Data":"d1952b968ac25bc9b04007dfd82cbc477a7b50559e2e6714e427972289b0308a"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.342542 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-86zb4" event={"ID":"63fd244b-ef08-493d-80e6-1dbf110f5207","Type":"ContainerStarted","Data":"edb5774b94b897aa15d04b3dd7f789410829113e616a6ed8ba408f3f722d0aca"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.345051 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" event={"ID":"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5","Type":"ContainerStarted","Data":"1c6fea2b028c8a958b996461dd5c61ac8c6c6df0339213bfd915694688eadbbc"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.352403 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mmmb9" event={"ID":"492d31e9-e510-4e8b-9042-38ca4be5b283","Type":"ContainerStarted","Data":"7689cd860666aa93325e5059a999a1723a2c030f2a1e55a45db4abb3a67e1ec1"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.366951 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.368185 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bp7w9" podStartSLOduration=126.368163507 podStartE2EDuration="2m6.368163507s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:05.362003102 +0000 UTC m=+146.771660141" watchObservedRunningTime="2025-10-01 06:19:05.368163507 +0000 UTC m=+146.777820556" Oct 01 06:19:05 crc kubenswrapper[4747]: E1001 06:19:05.371635 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:05.871583829 +0000 UTC m=+147.281240878 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.386658 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-9tnsf" event={"ID":"9648d961-f516-4332-9ff8-225f40f0af8f","Type":"ContainerStarted","Data":"e0fe495799fa87e046245d62dd59523221af2ea7ce77bf23c13f9e9769eacbc0"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.411860 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-92b7k" event={"ID":"2a26b929-108c-43a8-a196-eedee3230161","Type":"ContainerStarted","Data":"c0f9015a175f79094f00a54e7510de0ebd1205773163aab742e24016c616cdff"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.433886 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mmmb9" podStartSLOduration=125.433869079 podStartE2EDuration="2m5.433869079s" podCreationTimestamp="2025-10-01 06:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:05.40293694 +0000 UTC m=+146.812593989" watchObservedRunningTime="2025-10-01 06:19:05.433869079 +0000 UTC m=+146.843526128" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.434745 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-86zb4" podStartSLOduration=126.434739302 podStartE2EDuration="2m6.434739302s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:05.432168103 +0000 UTC m=+146.841825152" watchObservedRunningTime="2025-10-01 06:19:05.434739302 +0000 UTC m=+146.844396351" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.439351 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" event={"ID":"77e07ef9-a0c4-4677-9689-cc571997ecf7","Type":"ContainerStarted","Data":"4ad5ae72929e86e2cdebe200129c520d1cecb23e436e217900c837c950eee2c7"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.462686 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-kcszh" event={"ID":"e9b3bf47-d12e-437e-8b8b-15fda1a2be69","Type":"ContainerStarted","Data":"b6db5934063e4fd8f76c8c76889406ac12b35ac55f5eb6934b9c4623e1986203"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.474430 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:05 crc kubenswrapper[4747]: E1001 06:19:05.480387 4747 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:05.980366676 +0000 UTC m=+147.390023815 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.503015 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-bxlnn" event={"ID":"e5175887-79c8-46f9-9708-d4d9afea026d","Type":"ContainerStarted","Data":"fe8b0e77c67eb0c27eabdd4333a31949e22730b85590e50bb49d2137ed9641f2"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.511479 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-bxlnn" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.513572 4747 patch_prober.go:28] interesting pod/downloads-7954f5f757-bxlnn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.513644 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-bxlnn" podUID="e5175887-79c8-46f9-9708-d4d9afea026d" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.527206 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-w8ltr"] Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.537571 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-kxgrb" event={"ID":"d8ba6078-ec36-41f2-a9c4-01f4cfbce71c","Type":"ContainerStarted","Data":"93b53e629d8b000f6ab9cece45667b1cec7cfd71061577ca68dd73200b31f9fc"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.567258 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wvm4n" event={"ID":"eaddcad0-cef4-4919-84c0-3edbc3b2bd6c","Type":"ContainerStarted","Data":"2bfaaab145d7b68da991887a7d3b3931fd3726e77d9c36910f5bb2825004bf28"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.576150 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:05 crc kubenswrapper[4747]: E1001 06:19:05.576702 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:06.076670908 +0000 UTC m=+147.486327957 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.579135 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pjjgz" podStartSLOduration=126.579110922 podStartE2EDuration="2m6.579110922s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:05.552379496 +0000 UTC m=+146.962036555" watchObservedRunningTime="2025-10-01 06:19:05.579110922 +0000 UTC m=+146.988767971" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.580114 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-899r6"] Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.595847 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-bxlnn" podStartSLOduration=126.595832071 podStartE2EDuration="2m6.595832071s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:05.595269936 +0000 UTC m=+147.004926985" watchObservedRunningTime="2025-10-01 06:19:05.595832071 +0000 UTC m=+147.005489120" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.656970 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-kxgrb" podStartSLOduration=125.656946149 podStartE2EDuration="2m5.656946149s" podCreationTimestamp="2025-10-01 06:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:05.654217976 +0000 UTC m=+147.063875025" watchObservedRunningTime="2025-10-01 06:19:05.656946149 +0000 UTC m=+147.066603188" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.671023 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-xlnlq" event={"ID":"dc3f0a86-ed2e-452b-b829-e2fc65bbac66","Type":"ContainerStarted","Data":"ba6ddf91a3355397c1b227ac5eba3cb0717226034754ce84b803cc167a23b5f1"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.677140 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.677906 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:05 crc kubenswrapper[4747]: E1001 06:19:05.698562 4747 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:06.17973141 +0000 UTC m=+147.589388459 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.767101 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6l64z" event={"ID":"2ddb2a27-e181-4592-b710-69693c5a3efe","Type":"ContainerStarted","Data":"db2f03419c50e39a5dbf1a9c6ad6f5c1cc9b24a92ff03e1f42cc59d86eb08f5c"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.767144 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6l64z" event={"ID":"2ddb2a27-e181-4592-b710-69693c5a3efe","Type":"ContainerStarted","Data":"163118607a21d6a22daca55113b77ae1a842d47640b24d5c15ba92cf120e6215"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.768884 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.768943 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.781349 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:05 crc kubenswrapper[4747]: E1001 06:19:05.782412 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:06.282388073 +0000 UTC m=+147.692045122 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.826909 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4zldb" event={"ID":"35966a08-49de-4d9a-baa2-0b1b030b2353","Type":"ContainerStarted","Data":"c36d80735dfe866ce79df44692d5463ace88ce78a4227d01325b2ca310cea6c5"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.827784 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4zldb" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.838167 4747 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-4zldb container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.838237 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4zldb" podUID="35966a08-49de-4d9a-baa2-0b1b030b2353" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.841245 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-xlnlq" podStartSLOduration=7.841233301 podStartE2EDuration="7.841233301s" podCreationTimestamp="2025-10-01 06:18:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:05.839942336 +0000 UTC m=+147.249599385" watchObservedRunningTime="2025-10-01 06:19:05.841233301 +0000 UTC m=+147.250890340" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.850916 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-ss76n"] Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.880471 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-wvm4n" podStartSLOduration=127.880442142 podStartE2EDuration="2m7.880442142s" podCreationTimestamp="2025-10-01 06:16:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:05.873535977 +0000 UTC m=+147.283193026" watchObservedRunningTime="2025-10-01 06:19:05.880442142 +0000 UTC m=+147.290099191" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.883030 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" 
Oct 01 06:19:05 crc kubenswrapper[4747]: E1001 06:19:05.884443 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:06.384431938 +0000 UTC m=+147.794088987 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.902952 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-4kxcx" event={"ID":"07aacdeb-d996-4747-a1d8-4803d5f7f4a7","Type":"ContainerStarted","Data":"0f13c715ce5f3feb284a02a0b8d48362f4a27d0ac3bff9a99b0d198e0f39eced"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.919867 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4zldb" podStartSLOduration=125.919847588 podStartE2EDuration="2m5.919847588s" podCreationTimestamp="2025-10-01 06:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:05.917085474 +0000 UTC m=+147.326742523" watchObservedRunningTime="2025-10-01 06:19:05.919847588 +0000 UTC m=+147.329504647" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.928942 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg" event={"ID":"37ccee71-1ac4-49a3-bef1-74f2fe4babe7","Type":"ContainerStarted","Data":"734b3de4930fc36980bc36ff9cf89cf3d715d5175e94dc5f3fff7c5c3da4db02"} Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.929968 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg" Oct 01 06:19:05 crc kubenswrapper[4747]: I1001 06:19:05.987355 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:05 crc kubenswrapper[4747]: E1001 06:19:05.988044 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:06.488028555 +0000 UTC m=+147.897685604 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.002324 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-6l64z" podStartSLOduration=126.002304249 podStartE2EDuration="2m6.002304249s" podCreationTimestamp="2025-10-01 06:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:05.967572298 +0000 UTC m=+147.377229347" watchObservedRunningTime="2025-10-01 06:19:06.002304249 +0000 UTC m=+147.411961298" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.004133 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg" podStartSLOduration=126.004128248 podStartE2EDuration="2m6.004128248s" podCreationTimestamp="2025-10-01 06:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:06.002079312 +0000 UTC m=+147.411736361" watchObservedRunningTime="2025-10-01 06:19:06.004128248 +0000 UTC m=+147.413785297" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.033015 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-jzmsw" event={"ID":"5efb7422-a464-4daf-991f-808ba693495c","Type":"ContainerStarted","Data":"a116caa0069956e99971bc8ba187995a5599e6d295f0bac3a5f80ced7b9ca616"} Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.053614 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-284zz" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.094323 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-4kxcx" podStartSLOduration=126.094299515 podStartE2EDuration="2m6.094299515s" podCreationTimestamp="2025-10-01 06:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:06.040332048 +0000 UTC m=+147.449989097" watchObservedRunningTime="2025-10-01 06:19:06.094299515 +0000 UTC m=+147.503956564" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.096315 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-jzmsw" podStartSLOduration=127.096307399 podStartE2EDuration="2m7.096307399s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:06.093939095 +0000 UTC m=+147.503596154" watchObservedRunningTime="2025-10-01 06:19:06.096307399 +0000 UTC m=+147.505964448" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.107553 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:06 crc kubenswrapper[4747]: E1001 06:19:06.110253 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:06.610235682 +0000 UTC m=+148.019892721 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.122399 4747 patch_prober.go:28] interesting pod/router-default-5444994796-5nq8g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 06:19:06 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Oct 01 06:19:06 crc kubenswrapper[4747]: [+]process-running ok Oct 01 06:19:06 crc kubenswrapper[4747]: healthz check failed Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.124886 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5nq8g" podUID="0b633eab-bed0-436d-ad6d-bd7f315dc172" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.214127 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:06 crc kubenswrapper[4747]: E1001 06:19:06.214493 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:06.714475297 +0000 UTC m=+148.124132346 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.298613 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hnkg5"] Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.318236 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:06 crc kubenswrapper[4747]: E1001 06:19:06.318604 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:06.818592389 +0000 UTC m=+148.228249438 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.352553 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-r2t26"] Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.353817 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r2t26" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.369174 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.421558 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:06 crc kubenswrapper[4747]: E1001 06:19:06.421821 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:06.921776884 +0000 UTC m=+148.331433933 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.422204 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwtzg\" (UniqueName: \"kubernetes.io/projected/98569525-9a56-4701-9a06-46ac13cb40c3-kube-api-access-rwtzg\") pod \"redhat-marketplace-r2t26\" (UID: \"98569525-9a56-4701-9a06-46ac13cb40c3\") " pod="openshift-marketplace/redhat-marketplace-r2t26" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.422399 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98569525-9a56-4701-9a06-46ac13cb40c3-catalog-content\") pod \"redhat-marketplace-r2t26\" (UID: \"98569525-9a56-4701-9a06-46ac13cb40c3\") " pod="openshift-marketplace/redhat-marketplace-r2t26" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.422559 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.422660 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98569525-9a56-4701-9a06-46ac13cb40c3-utilities\") pod \"redhat-marketplace-r2t26\" (UID: \"98569525-9a56-4701-9a06-46ac13cb40c3\") " pod="openshift-marketplace/redhat-marketplace-r2t26" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.423017 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r2t26"] Oct 01 06:19:06 crc kubenswrapper[4747]: E1001 06:19:06.423691 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:06.923678336 +0000 UTC m=+148.333335385 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.523901 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.524129 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwtzg\" (UniqueName: \"kubernetes.io/projected/98569525-9a56-4701-9a06-46ac13cb40c3-kube-api-access-rwtzg\") pod \"redhat-marketplace-r2t26\" (UID: \"98569525-9a56-4701-9a06-46ac13cb40c3\") " pod="openshift-marketplace/redhat-marketplace-r2t26" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.524225 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98569525-9a56-4701-9a06-46ac13cb40c3-catalog-content\") pod \"redhat-marketplace-r2t26\" (UID: \"98569525-9a56-4701-9a06-46ac13cb40c3\") " pod="openshift-marketplace/redhat-marketplace-r2t26" Oct 01 06:19:06 crc kubenswrapper[4747]: E1001 06:19:06.524285 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:07.024259862 +0000 UTC m=+148.433916911 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.524328 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.524381 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98569525-9a56-4701-9a06-46ac13cb40c3-utilities\") pod \"redhat-marketplace-r2t26\" (UID: \"98569525-9a56-4701-9a06-46ac13cb40c3\") " pod="openshift-marketplace/redhat-marketplace-r2t26" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.524990 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98569525-9a56-4701-9a06-46ac13cb40c3-utilities\") pod \"redhat-marketplace-r2t26\" (UID: \"98569525-9a56-4701-9a06-46ac13cb40c3\") " pod="openshift-marketplace/redhat-marketplace-r2t26" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.525058 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98569525-9a56-4701-9a06-46ac13cb40c3-catalog-content\") pod \"redhat-marketplace-r2t26\" (UID: \"98569525-9a56-4701-9a06-46ac13cb40c3\") " pod="openshift-marketplace/redhat-marketplace-r2t26" Oct 01 06:19:06 crc kubenswrapper[4747]: E1001 06:19:06.525217 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:07.025210178 +0000 UTC m=+148.434867227 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.582459 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwtzg\" (UniqueName: \"kubernetes.io/projected/98569525-9a56-4701-9a06-46ac13cb40c3-kube-api-access-rwtzg\") pod \"redhat-marketplace-r2t26\" (UID: \"98569525-9a56-4701-9a06-46ac13cb40c3\") " pod="openshift-marketplace/redhat-marketplace-r2t26" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.625295 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:06 crc kubenswrapper[4747]: E1001 06:19:06.625638 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:07.12561097 +0000 UTC m=+148.535268019 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.697495 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r2t26" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.708657 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nsqw9"] Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.709724 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nsqw9" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.725658 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nsqw9"] Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.726792 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.726839 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.726876 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.726901 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.726943 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:19:06 crc kubenswrapper[4747]: E1001 06:19:06.732325 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:07.23231091 +0000 UTC m=+148.641967959 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.737229 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.744202 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.744271 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.744921 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.797986 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.808046 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.822466 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.838013 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.838318 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5-utilities\") pod \"redhat-marketplace-nsqw9\" (UID: \"f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5\") " pod="openshift-marketplace/redhat-marketplace-nsqw9" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.838428 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpfps\" (UniqueName: \"kubernetes.io/projected/f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5-kube-api-access-mpfps\") pod \"redhat-marketplace-nsqw9\" (UID: \"f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5\") " pod="openshift-marketplace/redhat-marketplace-nsqw9" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.838508 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5-catalog-content\") pod \"redhat-marketplace-nsqw9\" (UID: \"f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5\") " pod="openshift-marketplace/redhat-marketplace-nsqw9" Oct 01 06:19:06 crc kubenswrapper[4747]: E1001 06:19:06.838577 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:07.338551279 +0000 UTC m=+148.748208328 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.932217 4747 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-689vg container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.19:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.932472 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg" podUID="37ccee71-1ac4-49a3-bef1-74f2fe4babe7" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.19:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.939646 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.939704 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5-utilities\") pod \"redhat-marketplace-nsqw9\" (UID: \"f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5\") " pod="openshift-marketplace/redhat-marketplace-nsqw9" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.939852 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpfps\" (UniqueName: \"kubernetes.io/projected/f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5-kube-api-access-mpfps\") pod \"redhat-marketplace-nsqw9\" (UID: \"f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5\") " pod="openshift-marketplace/redhat-marketplace-nsqw9" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.939886 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5-catalog-content\") pod \"redhat-marketplace-nsqw9\" (UID: \"f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5\") " pod="openshift-marketplace/redhat-marketplace-nsqw9" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.940307 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5-catalog-content\") pod \"redhat-marketplace-nsqw9\" (UID: \"f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5\") " pod="openshift-marketplace/redhat-marketplace-nsqw9" Oct 01 06:19:06 crc kubenswrapper[4747]: E1001 06:19:06.940543 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:07.440531683 +0000 UTC m=+148.850188732 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.940767 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5-utilities\") pod \"redhat-marketplace-nsqw9\" (UID: \"f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5\") " pod="openshift-marketplace/redhat-marketplace-nsqw9" Oct 01 06:19:06 crc kubenswrapper[4747]: I1001 06:19:06.982731 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpfps\" (UniqueName: \"kubernetes.io/projected/f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5-kube-api-access-mpfps\") pod \"redhat-marketplace-nsqw9\" (UID: \"f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5\") " pod="openshift-marketplace/redhat-marketplace-nsqw9" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.037098 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nsqw9" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.051670 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:07 crc kubenswrapper[4747]: E1001 06:19:07.052075 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:07.552059912 +0000 UTC m=+148.961716961 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.111364 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q6jpz" event={"ID":"9847d204-b75b-41bb-8f4b-03058aeab9fb","Type":"ContainerStarted","Data":"381ab1e392b403e3649f32ae372c7cbb98ddcfbd0eb8dbf29552301a89e4c5ac"} Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.111417 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q6jpz" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.113178 4747 patch_prober.go:28] interesting pod/router-default-5444994796-5nq8g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 06:19:07 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Oct 01 06:19:07 crc kubenswrapper[4747]: [+]process-running ok Oct 01 06:19:07 crc kubenswrapper[4747]: healthz check failed Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.113253 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5nq8g" podUID="0b633eab-bed0-436d-ad6d-bd7f315dc172" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.153820 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:07 crc kubenswrapper[4747]: E1001 06:19:07.154190 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:07.65417612 +0000 UTC m=+149.063833169 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.183767 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4zldb" event={"ID":"35966a08-49de-4d9a-baa2-0b1b030b2353","Type":"ContainerStarted","Data":"1a8f4e27ca5d34a318a2d144a0c6a8751b95259604350692f1a42a95f3b45687"} Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.211234 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-4zldb" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.225646 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-jrxnk" event={"ID":"a879ef30-1cb2-4129-8acd-6fce3c6b88e3","Type":"ContainerStarted","Data":"300002b842cbf6df9b772cf5b482070c575a058b414f0c86811bcea0e39abd68"} Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.247457 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-4kxcx" event={"ID":"07aacdeb-d996-4747-a1d8-4803d5f7f4a7","Type":"ContainerStarted","Data":"c6b8a0d9f327f119d49e1110a3e36eebded615976277622fce38d9d35debc56b"} Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.248702 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q6jpz" podStartSLOduration=127.248671814 podStartE2EDuration="2m7.248671814s" podCreationTimestamp="2025-10-01 06:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:07.146036403 +0000 UTC m=+148.555693452" watchObservedRunningTime="2025-10-01 06:19:07.248671814 +0000 UTC m=+148.658328863" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.256106 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.256391 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs\") pod \"network-metrics-daemon-4g26h\" (UID: \"d04a872f-a6a7-45d3-aa62-be934b7266c2\") " pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:19:07 crc kubenswrapper[4747]: E1001 06:19:07.257355 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:07.757338326 +0000 UTC m=+149.166995375 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.292646 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d04a872f-a6a7-45d3-aa62-be934b7266c2-metrics-certs\") pod \"network-metrics-daemon-4g26h\" (UID: \"d04a872f-a6a7-45d3-aa62-be934b7266c2\") " pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.345233 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-bp7w9" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.345264 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-mmmb9" event={"ID":"492d31e9-e510-4e8b-9042-38ca4be5b283","Type":"ContainerStarted","Data":"846467f933ac52ef2a5e55475032ea55c78011ecdb2db51d53e58fab3924b27d"} Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.345287 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2gdmv"] Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.346368 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2gdmv" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.352536 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2gdmv"] Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.362959 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.364258 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:07 crc kubenswrapper[4747]: E1001 06:19:07.364536 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:07.86452491 +0000 UTC m=+149.274181959 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.372560 4747 generic.go:334] "Generic (PLEG): container finished" podID="ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c" containerID="3b007c13927366562a6344f72bd0d0e07e21e5b870c77fa230d49840aa46f18a" exitCode=0 Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.372658 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hnkg5" event={"ID":"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c","Type":"ContainerDied","Data":"3b007c13927366562a6344f72bd0d0e07e21e5b870c77fa230d49840aa46f18a"} Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.372695 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hnkg5" event={"ID":"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c","Type":"ContainerStarted","Data":"addff842755337d677f3d0282a9f930f7d3ca3ca0ec625cb5ba327191e89001a"} Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.376063 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.399618 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.408055 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.412888 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.413471 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.421427 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-9tnsf" event={"ID":"9648d961-f516-4332-9ff8-225f40f0af8f","Type":"ContainerStarted","Data":"df9b63ca5a2d325e71803306cdcf4a1e3bdf65afbfc9d06a50f736da19a6055d"} Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.421463 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-9tnsf" event={"ID":"9648d961-f516-4332-9ff8-225f40f0af8f","Type":"ContainerStarted","Data":"ccd800e1428fcf45fc60a67cf17c22853ca5245fedff857e0f578cb8c70d705a"} Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.429869 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-4g26h" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.450003 4747 generic.go:334] "Generic (PLEG): container finished" podID="8e6ee5ef-246c-41dd-a003-d590e1e1119b" containerID="6124cf08ab14d5baface208480bcda409c6fa0b3c02539e8a377864d4324b65b" exitCode=0 Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.450086 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w8ltr" event={"ID":"8e6ee5ef-246c-41dd-a003-d590e1e1119b","Type":"ContainerDied","Data":"6124cf08ab14d5baface208480bcda409c6fa0b3c02539e8a377864d4324b65b"} Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.450112 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w8ltr" event={"ID":"8e6ee5ef-246c-41dd-a003-d590e1e1119b","Type":"ContainerStarted","Data":"30cbd2d3177c60567157e16a090a15f34c788455e9aa4c5f014308764c1b5712"} Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.459874 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.468671 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:07 crc kubenswrapper[4747]: E1001 06:19:07.470841 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:07.970820099 +0000 UTC m=+149.380477148 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.486775 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ss76n" event={"ID":"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c","Type":"ContainerDied","Data":"3e245d60333a68499d2a44cbda22237b61d189f0a516a0a077d48c1cff8c85a6"} Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.486712 4747 generic.go:334] "Generic (PLEG): container finished" podID="b59cfffc-7d07-436d-bd34-6c7e3aa0da3c" containerID="3e245d60333a68499d2a44cbda22237b61d189f0a516a0a077d48c1cff8c85a6" exitCode=0 Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.487071 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ss76n" event={"ID":"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c","Type":"ContainerStarted","Data":"fdd4c0125d9734726991fd1b84f42acbdc66436eecf50a2c2bb5f5e244efa7b7"} Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.490165 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnr8d\" (UniqueName: \"kubernetes.io/projected/055f5415-eb0a-47cd-9f51-9fe5751b890a-kube-api-access-vnr8d\") pod \"redhat-operators-2gdmv\" (UID: \"055f5415-eb0a-47cd-9f51-9fe5751b890a\") " pod="openshift-marketplace/redhat-operators-2gdmv" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.490237 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/055f5415-eb0a-47cd-9f51-9fe5751b890a-catalog-content\") pod \"redhat-operators-2gdmv\" (UID: \"055f5415-eb0a-47cd-9f51-9fe5751b890a\") " pod="openshift-marketplace/redhat-operators-2gdmv" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.490386 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.491076 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/055f5415-eb0a-47cd-9f51-9fe5751b890a-utilities\") pod \"redhat-operators-2gdmv\" (UID: \"055f5415-eb0a-47cd-9f51-9fe5751b890a\") " pod="openshift-marketplace/redhat-operators-2gdmv" Oct 01 06:19:07 crc kubenswrapper[4747]: E1001 06:19:07.493935 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:07.993918929 +0000 UTC m=+149.403575978 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.555107 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r2t26"] Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.568000 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-48x76" event={"ID":"86217928-f132-414b-a374-3e20ea531035","Type":"ContainerStarted","Data":"fc045f16aec9d37e622f3b8ab335e21325fd89bb7e5f762a540fbb582ed968f0"} Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.586577 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-9tnsf" podStartSLOduration=127.586559262 podStartE2EDuration="2m7.586559262s" podCreationTimestamp="2025-10-01 06:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:07.506056285 +0000 UTC m=+148.915713344" watchObservedRunningTime="2025-10-01 06:19:07.586559262 +0000 UTC m=+148.996216311" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.594177 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.594396 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnr8d\" (UniqueName: \"kubernetes.io/projected/055f5415-eb0a-47cd-9f51-9fe5751b890a-kube-api-access-vnr8d\") pod \"redhat-operators-2gdmv\" (UID: \"055f5415-eb0a-47cd-9f51-9fe5751b890a\") " pod="openshift-marketplace/redhat-operators-2gdmv" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.594426 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f8b7e6b7-8b4b-463d-af7f-2d05529d0cce-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"f8b7e6b7-8b4b-463d-af7f-2d05529d0cce\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.594444 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/055f5415-eb0a-47cd-9f51-9fe5751b890a-catalog-content\") pod \"redhat-operators-2gdmv\" (UID: \"055f5415-eb0a-47cd-9f51-9fe5751b890a\") " pod="openshift-marketplace/redhat-operators-2gdmv" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.594481 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f8b7e6b7-8b4b-463d-af7f-2d05529d0cce-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"f8b7e6b7-8b4b-463d-af7f-2d05529d0cce\") " 
pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.594524 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/055f5415-eb0a-47cd-9f51-9fe5751b890a-utilities\") pod \"redhat-operators-2gdmv\" (UID: \"055f5415-eb0a-47cd-9f51-9fe5751b890a\") " pod="openshift-marketplace/redhat-operators-2gdmv" Oct 01 06:19:07 crc kubenswrapper[4747]: E1001 06:19:07.595338 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:08.095322477 +0000 UTC m=+149.504979526 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.596032 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/055f5415-eb0a-47cd-9f51-9fe5751b890a-catalog-content\") pod \"redhat-operators-2gdmv\" (UID: \"055f5415-eb0a-47cd-9f51-9fe5751b890a\") " pod="openshift-marketplace/redhat-operators-2gdmv" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.596726 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/055f5415-eb0a-47cd-9f51-9fe5751b890a-utilities\") pod \"redhat-operators-2gdmv\" (UID: \"055f5415-eb0a-47cd-9f51-9fe5751b890a\") " pod="openshift-marketplace/redhat-operators-2gdmv" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.611079 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rkcl6"] Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.612841 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rkcl6" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.619869 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rkcl6"] Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.660540 4747 generic.go:334] "Generic (PLEG): container finished" podID="fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e" containerID="75761a6be47d7bc35f0b14e74baa447883510852390af697c44494eb15b1d871" exitCode=0 Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.660639 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-899r6" event={"ID":"fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e","Type":"ContainerDied","Data":"75761a6be47d7bc35f0b14e74baa447883510852390af697c44494eb15b1d871"} Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.660670 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-899r6" event={"ID":"fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e","Type":"ContainerStarted","Data":"91d81e33c1c025150400cebe8fb08ec3f33b78035d6cde4ca9503e0a6d922330"} Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.661629 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnr8d\" (UniqueName: \"kubernetes.io/projected/055f5415-eb0a-47cd-9f51-9fe5751b890a-kube-api-access-vnr8d\") pod \"redhat-operators-2gdmv\" (UID: \"055f5415-eb0a-47cd-9f51-9fe5751b890a\") " pod="openshift-marketplace/redhat-operators-2gdmv" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.696621 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f8b7e6b7-8b4b-463d-af7f-2d05529d0cce-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"f8b7e6b7-8b4b-463d-af7f-2d05529d0cce\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.696681 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f8b7e6b7-8b4b-463d-af7f-2d05529d0cce-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"f8b7e6b7-8b4b-463d-af7f-2d05529d0cce\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.696711 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:07 crc kubenswrapper[4747]: E1001 06:19:07.697817 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:08.197801015 +0000 UTC m=+149.607458064 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.697889 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f8b7e6b7-8b4b-463d-af7f-2d05529d0cce-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"f8b7e6b7-8b4b-463d-af7f-2d05529d0cce\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.716491 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2gdmv" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.723229 4747 generic.go:334] "Generic (PLEG): container finished" podID="da478e7d-66d5-4ab9-9a01-66c1f57d8ef5" containerID="b690af1d7adab76222e85419f212f22aa41c5193493a170c854d48c8085c792f" exitCode=0 Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.723339 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" event={"ID":"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5","Type":"ContainerStarted","Data":"fc17e3152da850971cdbc1edb8e6e26ea88eddbfa9b16fdef044778b7664a3fd"} Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.723373 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" event={"ID":"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5","Type":"ContainerDied","Data":"b690af1d7adab76222e85419f212f22aa41c5193493a170c854d48c8085c792f"} Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.763768 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f8b7e6b7-8b4b-463d-af7f-2d05529d0cce-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"f8b7e6b7-8b4b-463d-af7f-2d05529d0cce\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.774168 4747 generic.go:334] "Generic (PLEG): container finished" podID="77e07ef9-a0c4-4677-9689-cc571997ecf7" containerID="a832260b1796385b012b1a204e0a1d92334aefdb8efd815f9a592f9c8ecc8fd0" exitCode=0 Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.774264 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" event={"ID":"77e07ef9-a0c4-4677-9689-cc571997ecf7","Type":"ContainerStarted","Data":"e0295962b481ce91fea79d951e92b3d436db3323f2eb27044e8eb17dbe136ebf"} Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.774297 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" event={"ID":"77e07ef9-a0c4-4677-9689-cc571997ecf7","Type":"ContainerDied","Data":"a832260b1796385b012b1a204e0a1d92334aefdb8efd815f9a592f9c8ecc8fd0"} Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.792398 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.805435 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.805680 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ecbf263-d2a7-4f34-8623-7806e8cacbae-catalog-content\") pod \"redhat-operators-rkcl6\" (UID: \"0ecbf263-d2a7-4f34-8623-7806e8cacbae\") " pod="openshift-marketplace/redhat-operators-rkcl6" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.805738 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prsbr\" (UniqueName: \"kubernetes.io/projected/0ecbf263-d2a7-4f34-8623-7806e8cacbae-kube-api-access-prsbr\") pod \"redhat-operators-rkcl6\" (UID: \"0ecbf263-d2a7-4f34-8623-7806e8cacbae\") " pod="openshift-marketplace/redhat-operators-rkcl6" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.805808 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ecbf263-d2a7-4f34-8623-7806e8cacbae-utilities\") pod \"redhat-operators-rkcl6\" (UID: \"0ecbf263-d2a7-4f34-8623-7806e8cacbae\") " pod="openshift-marketplace/redhat-operators-rkcl6" Oct 01 06:19:07 crc kubenswrapper[4747]: E1001 06:19:07.806072 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:08.306053937 +0000 UTC m=+149.715710986 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.814378 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-92b7k" event={"ID":"2a26b929-108c-43a8-a196-eedee3230161","Type":"ContainerStarted","Data":"70670d691e28034394a02f17a82a4cc0089af66fcff1013194dba3a26c56149a"} Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.814417 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-92b7k" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.835737 4747 patch_prober.go:28] interesting pod/downloads-7954f5f757-bxlnn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.835835 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-bxlnn" podUID="e5175887-79c8-46f9-9708-d4d9afea026d" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.855514 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-zgvwc" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.858142 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" podStartSLOduration=128.858120233 podStartE2EDuration="2m8.858120233s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:07.812893551 +0000 UTC m=+149.222550620" watchObservedRunningTime="2025-10-01 06:19:07.858120233 +0000 UTC m=+149.267777272" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.859655 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" podStartSLOduration=127.859648674 podStartE2EDuration="2m7.859648674s" podCreationTimestamp="2025-10-01 06:17:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:07.859388477 +0000 UTC m=+149.269045536" watchObservedRunningTime="2025-10-01 06:19:07.859648674 +0000 UTC m=+149.269305733" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.862595 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-689vg" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.894678 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-92b7k" podStartSLOduration=9.894659252 podStartE2EDuration="9.894659252s" podCreationTimestamp="2025-10-01 06:18:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:07.89420068 +0000 UTC m=+149.303857739" watchObservedRunningTime="2025-10-01 06:19:07.894659252 +0000 UTC m=+149.304316301" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.907066 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ecbf263-d2a7-4f34-8623-7806e8cacbae-catalog-content\") pod \"redhat-operators-rkcl6\" (UID: \"0ecbf263-d2a7-4f34-8623-7806e8cacbae\") " pod="openshift-marketplace/redhat-operators-rkcl6" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.907122 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prsbr\" (UniqueName: \"kubernetes.io/projected/0ecbf263-d2a7-4f34-8623-7806e8cacbae-kube-api-access-prsbr\") pod \"redhat-operators-rkcl6\" (UID: \"0ecbf263-d2a7-4f34-8623-7806e8cacbae\") " pod="openshift-marketplace/redhat-operators-rkcl6" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.907155 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.907211 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ecbf263-d2a7-4f34-8623-7806e8cacbae-utilities\") pod \"redhat-operators-rkcl6\" (UID: \"0ecbf263-d2a7-4f34-8623-7806e8cacbae\") " pod="openshift-marketplace/redhat-operators-rkcl6" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.909595 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ecbf263-d2a7-4f34-8623-7806e8cacbae-catalog-content\") pod \"redhat-operators-rkcl6\" (UID: \"0ecbf263-d2a7-4f34-8623-7806e8cacbae\") " pod="openshift-marketplace/redhat-operators-rkcl6" Oct 01 06:19:07 crc kubenswrapper[4747]: E1001 06:19:07.910533 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:08.410523218 +0000 UTC m=+149.820180267 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.911413 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ecbf263-d2a7-4f34-8623-7806e8cacbae-utilities\") pod \"redhat-operators-rkcl6\" (UID: \"0ecbf263-d2a7-4f34-8623-7806e8cacbae\") " pod="openshift-marketplace/redhat-operators-rkcl6" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.931283 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prsbr\" (UniqueName: \"kubernetes.io/projected/0ecbf263-d2a7-4f34-8623-7806e8cacbae-kube-api-access-prsbr\") pod \"redhat-operators-rkcl6\" (UID: \"0ecbf263-d2a7-4f34-8623-7806e8cacbae\") " pod="openshift-marketplace/redhat-operators-rkcl6" Oct 01 06:19:07 crc kubenswrapper[4747]: I1001 06:19:07.993422 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nsqw9"] Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.003408 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rkcl6" Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.014356 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:08 crc kubenswrapper[4747]: E1001 06:19:08.015612 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:08.515597505 +0000 UTC m=+149.925254554 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.116516 4747 patch_prober.go:28] interesting pod/router-default-5444994796-5nq8g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 06:19:08 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Oct 01 06:19:08 crc kubenswrapper[4747]: [+]process-running ok Oct 01 06:19:08 crc kubenswrapper[4747]: healthz check failed Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.116928 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5nq8g" podUID="0b633eab-bed0-436d-ad6d-bd7f315dc172" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.118093 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:08 crc kubenswrapper[4747]: E1001 06:19:08.118470 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:08.618458943 +0000 UTC m=+150.028115992 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.219729 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:08 crc kubenswrapper[4747]: E1001 06:19:08.220037 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:08.720022666 +0000 UTC m=+150.129679715 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.324823 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:08 crc kubenswrapper[4747]: E1001 06:19:08.325355 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:08.82534403 +0000 UTC m=+150.235001079 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.339201 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2gdmv"] Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.426258 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:08 crc kubenswrapper[4747]: E1001 06:19:08.426380 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:08.926358228 +0000 UTC m=+150.336015267 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.426501 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:08 crc kubenswrapper[4747]: E1001 06:19:08.426879 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:08.926866611 +0000 UTC m=+150.336523660 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.441148 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-4g26h"] Oct 01 06:19:08 crc kubenswrapper[4747]: W1001 06:19:08.495974 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd04a872f_a6a7_45d3_aa62_be934b7266c2.slice/crio-7af65d49844e909388b8b5104c5bd12ef18e92f9556fe380ba179ef3e1a43976 WatchSource:0}: Error finding container 7af65d49844e909388b8b5104c5bd12ef18e92f9556fe380ba179ef3e1a43976: Status 404 returned error can't find the container with id 7af65d49844e909388b8b5104c5bd12ef18e92f9556fe380ba179ef3e1a43976 Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.527868 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:08 crc kubenswrapper[4747]: E1001 06:19:08.528148 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:09.028126866 +0000 UTC m=+150.437783915 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.629317 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:08 crc kubenswrapper[4747]: E1001 06:19:08.629636 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:09.129624727 +0000 UTC m=+150.539281776 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.647795 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.733264 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.733573 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rkcl6"] Oct 01 06:19:08 crc kubenswrapper[4747]: E1001 06:19:08.733616 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:09.233550613 +0000 UTC m=+150.643207652 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.835651 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:08 crc kubenswrapper[4747]: E1001 06:19:08.836629 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:09.336617987 +0000 UTC m=+150.746275036 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.867216 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-48x76" event={"ID":"86217928-f132-414b-a374-3e20ea531035","Type":"ContainerStarted","Data":"a13bd2ceea0a3409416b3d4cd475581fcfce0e2c21a7cbf1a9f02a90d1826a5e"} Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.869043 4747 generic.go:334] "Generic (PLEG): container finished" podID="055f5415-eb0a-47cd-9f51-9fe5751b890a" containerID="ebaa7ff65a058845b093a14ffeb584ba84c31cd1fc962aeebb564433aae1a7b3" exitCode=0 Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.870011 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2gdmv" event={"ID":"055f5415-eb0a-47cd-9f51-9fe5751b890a","Type":"ContainerDied","Data":"ebaa7ff65a058845b093a14ffeb584ba84c31cd1fc962aeebb564433aae1a7b3"} Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.870036 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2gdmv" event={"ID":"055f5415-eb0a-47cd-9f51-9fe5751b890a","Type":"ContainerStarted","Data":"f3784c009faa18dd3d788788a11e96d3b84c91a11b90899365a7851879f163bc"} Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.873021 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"f8b7e6b7-8b4b-463d-af7f-2d05529d0cce","Type":"ContainerStarted","Data":"d93cc386d8c60fca8607507affda16887afd73503998202bc1ca90b563e2d7a2"} Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.875197 4747 generic.go:334] "Generic (PLEG): container finished" podID="98569525-9a56-4701-9a06-46ac13cb40c3" containerID="ac45c3b5d9c1b1cbf88cdff20bfc3e6f31430987e2327a5095db89f4516b133c" exitCode=0 Oct 01 06:19:08 crc 
kubenswrapper[4747]: I1001 06:19:08.875294 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r2t26" event={"ID":"98569525-9a56-4701-9a06-46ac13cb40c3","Type":"ContainerDied","Data":"ac45c3b5d9c1b1cbf88cdff20bfc3e6f31430987e2327a5095db89f4516b133c"} Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.875322 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r2t26" event={"ID":"98569525-9a56-4701-9a06-46ac13cb40c3","Type":"ContainerStarted","Data":"88b22cf187add4323ec199f4f1d0809e7765a5aee9cb657fadc32f47841f487b"} Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.878277 4747 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.879309 4747 generic.go:334] "Generic (PLEG): container finished" podID="5efb7422-a464-4daf-991f-808ba693495c" containerID="a116caa0069956e99971bc8ba187995a5599e6d295f0bac3a5f80ced7b9ca616" exitCode=0 Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.879381 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-jzmsw" event={"ID":"5efb7422-a464-4daf-991f-808ba693495c","Type":"ContainerDied","Data":"a116caa0069956e99971bc8ba187995a5599e6d295f0bac3a5f80ced7b9ca616"} Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.881900 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"1a42fc7e28166645362b47ee7d8bcebe5742ee525b814e3628c4c5d97581a50e"} Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.881941 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"fb69bde75b624cb230960e17908252fdb9dfca651183730e3bded1dd9bc032f9"} Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.885041 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"694779eeed6ac031638f9f72957c59705dc1e32148e4172180e7fe01113dcea0"} Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.885065 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"0a1511844c01188318930eb44fd67fa63559c24795b98ef5c4c6d67a55240916"} Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.885270 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.892772 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" event={"ID":"da478e7d-66d5-4ab9-9a01-66c1f57d8ef5","Type":"ContainerStarted","Data":"dc2093ef7cac940c923df808174e965e06f051535d30e0973e148b3e8489d10f"} Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.895905 4747 generic.go:334] "Generic (PLEG): container finished" podID="f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5" 
containerID="6d763de3641aa1a195cfa594f73724b24a25aa945695fdeb46d3ed225c0b2ff7" exitCode=0 Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.895964 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nsqw9" event={"ID":"f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5","Type":"ContainerDied","Data":"6d763de3641aa1a195cfa594f73724b24a25aa945695fdeb46d3ed225c0b2ff7"} Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.895982 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nsqw9" event={"ID":"f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5","Type":"ContainerStarted","Data":"f6e41ebdf9622ff35c16cea6620bab90c0637d83e6b7d8e9a062db5feab715c7"} Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.900820 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"3056ec0eddfb89e40024281c0d557ac63a5b0c066fccd22f73698ea722463598"} Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.900894 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"5426128729d9db850f4d044115c1c5cffbfd826631abb6631a6005b82d2e51ac"} Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.903115 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-4g26h" event={"ID":"d04a872f-a6a7-45d3-aa62-be934b7266c2","Type":"ContainerStarted","Data":"7af65d49844e909388b8b5104c5bd12ef18e92f9556fe380ba179ef3e1a43976"} Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.904805 4747 patch_prober.go:28] interesting pod/downloads-7954f5f757-bxlnn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.904861 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-bxlnn" podUID="e5175887-79c8-46f9-9708-d4d9afea026d" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" Oct 01 06:19:08 crc kubenswrapper[4747]: I1001 06:19:08.940709 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:08 crc kubenswrapper[4747]: E1001 06:19:08.941904 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:09.441880339 +0000 UTC m=+150.851537388 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:08 crc kubenswrapper[4747]: W1001 06:19:08.989103 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0ecbf263_d2a7_4f34_8623_7806e8cacbae.slice/crio-be0c23f0668f620442b18d56811efedaf217814d6ad7a87d1d0288e8a4a1a4e0 WatchSource:0}: Error finding container be0c23f0668f620442b18d56811efedaf217814d6ad7a87d1d0288e8a4a1a4e0: Status 404 returned error can't find the container with id be0c23f0668f620442b18d56811efedaf217814d6ad7a87d1d0288e8a4a1a4e0 Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.045478 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:09 crc kubenswrapper[4747]: E1001 06:19:09.053117 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:09.553088391 +0000 UTC m=+150.962745440 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.109358 4747 patch_prober.go:28] interesting pod/router-default-5444994796-5nq8g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 06:19:09 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Oct 01 06:19:09 crc kubenswrapper[4747]: [+]process-running ok Oct 01 06:19:09 crc kubenswrapper[4747]: healthz check failed Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.109408 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5nq8g" podUID="0b633eab-bed0-436d-ad6d-bd7f315dc172" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.147122 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:09 crc kubenswrapper[4747]: E1001 06:19:09.147305 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:09.647276956 +0000 UTC m=+151.056934005 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.147418 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:09 crc kubenswrapper[4747]: E1001 06:19:09.147866 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:09.647854431 +0000 UTC m=+151.057511480 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.248490 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:09 crc kubenswrapper[4747]: E1001 06:19:09.248676 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:09.748654653 +0000 UTC m=+151.158311702 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.248886 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:09 crc kubenswrapper[4747]: E1001 06:19:09.249205 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:09.749197768 +0000 UTC m=+151.158854817 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.353168 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:09 crc kubenswrapper[4747]: E1001 06:19:09.353891 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-01 06:19:09.853878514 +0000 UTC m=+151.263535563 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.454780 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:09 crc kubenswrapper[4747]: E1001 06:19:09.455058 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-01 06:19:09.955044737 +0000 UTC m=+151.364701786 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-8mc8d" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.486409 4747 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-01T06:19:08.878288684Z","Handler":null,"Name":""} Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.519038 4747 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.519076 4747 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.563099 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.575025 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.581937 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.582566 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.591885 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.592208 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.601208 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.664546 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.682114 4747 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.682154 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.745627 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-8mc8d\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.767422 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/894747bb-0bd0-4382-a444-1702bfcf44e3-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"894747bb-0bd0-4382-a444-1702bfcf44e3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.771680 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/894747bb-0bd0-4382-a444-1702bfcf44e3-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"894747bb-0bd0-4382-a444-1702bfcf44e3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.872284 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/894747bb-0bd0-4382-a444-1702bfcf44e3-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"894747bb-0bd0-4382-a444-1702bfcf44e3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 06:19:09 
crc kubenswrapper[4747]: I1001 06:19:09.872367 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/894747bb-0bd0-4382-a444-1702bfcf44e3-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"894747bb-0bd0-4382-a444-1702bfcf44e3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.872527 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/894747bb-0bd0-4382-a444-1702bfcf44e3-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"894747bb-0bd0-4382-a444-1702bfcf44e3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.895471 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/894747bb-0bd0-4382-a444-1702bfcf44e3-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"894747bb-0bd0-4382-a444-1702bfcf44e3\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.917905 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-4g26h" event={"ID":"d04a872f-a6a7-45d3-aa62-be934b7266c2","Type":"ContainerStarted","Data":"31a8349c31515b75de09f76e50fe000cdd8d42a7b4837408ca2d3db9fe762ea5"} Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.917957 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-4g26h" event={"ID":"d04a872f-a6a7-45d3-aa62-be934b7266c2","Type":"ContainerStarted","Data":"db5f5d16c82df1bf3cb1e297a4b90f3194fc82d1576b9c5b116b269590774878"} Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.923011 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-48x76" event={"ID":"86217928-f132-414b-a374-3e20ea531035","Type":"ContainerStarted","Data":"1df9cfecc59c5ba1c5d1f5857bce28af39f0765082a60513990b345588602439"} Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.923036 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-48x76" event={"ID":"86217928-f132-414b-a374-3e20ea531035","Type":"ContainerStarted","Data":"d03bd1ca6eab3720caf8556b0467b199eab7aca61e0a3164b4dbd20b2db10a9d"} Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.925606 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.926092 4747 generic.go:334] "Generic (PLEG): container finished" podID="0ecbf263-d2a7-4f34-8623-7806e8cacbae" containerID="eff9ca38405af0e70b3c4b53cf0ca0fe9777bb1acd02d8be957825f2c4dd4181" exitCode=0 Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.926184 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rkcl6" event={"ID":"0ecbf263-d2a7-4f34-8623-7806e8cacbae","Type":"ContainerDied","Data":"eff9ca38405af0e70b3c4b53cf0ca0fe9777bb1acd02d8be957825f2c4dd4181"} Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.926324 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rkcl6" event={"ID":"0ecbf263-d2a7-4f34-8623-7806e8cacbae","Type":"ContainerStarted","Data":"be0c23f0668f620442b18d56811efedaf217814d6ad7a87d1d0288e8a4a1a4e0"} Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.931256 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"f8b7e6b7-8b4b-463d-af7f-2d05529d0cce","Type":"ContainerStarted","Data":"34523cb5af1cc6bda26f1c5fe0d762d41c2c13b8dd87ee6cf552a1a369397dab"} Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.954611 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.954594399 podStartE2EDuration="2.954594399s" podCreationTimestamp="2025-10-01 06:19:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:09.952643967 +0000 UTC m=+151.362301016" watchObservedRunningTime="2025-10-01 06:19:09.954594399 +0000 UTC m=+151.364251448" Oct 01 06:19:09 crc kubenswrapper[4747]: I1001 06:19:09.958030 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-4g26h" podStartSLOduration=131.958017121 podStartE2EDuration="2m11.958017121s" podCreationTimestamp="2025-10-01 06:16:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:09.938454927 +0000 UTC m=+151.348111976" watchObservedRunningTime="2025-10-01 06:19:09.958017121 +0000 UTC m=+151.367674170" Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.007101 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.012100 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-48x76" podStartSLOduration=12.01207653 podStartE2EDuration="12.01207653s" podCreationTimestamp="2025-10-01 06:18:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:10.009477531 +0000 UTC m=+151.419134580" watchObservedRunningTime="2025-10-01 06:19:10.01207653 +0000 UTC m=+151.421733579" Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.109817 4747 patch_prober.go:28] interesting pod/router-default-5444994796-5nq8g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 06:19:10 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Oct 01 06:19:10 crc kubenswrapper[4747]: [+]process-running ok Oct 01 06:19:10 crc kubenswrapper[4747]: healthz check failed Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.110333 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5nq8g" podUID="0b633eab-bed0-436d-ad6d-bd7f315dc172" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.244992 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-jzmsw" Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.384305 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5efb7422-a464-4daf-991f-808ba693495c-secret-volume\") pod \"5efb7422-a464-4daf-991f-808ba693495c\" (UID: \"5efb7422-a464-4daf-991f-808ba693495c\") " Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.384386 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5efb7422-a464-4daf-991f-808ba693495c-config-volume\") pod \"5efb7422-a464-4daf-991f-808ba693495c\" (UID: \"5efb7422-a464-4daf-991f-808ba693495c\") " Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.384450 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-srdmv\" (UniqueName: \"kubernetes.io/projected/5efb7422-a464-4daf-991f-808ba693495c-kube-api-access-srdmv\") pod \"5efb7422-a464-4daf-991f-808ba693495c\" (UID: \"5efb7422-a464-4daf-991f-808ba693495c\") " Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.386089 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5efb7422-a464-4daf-991f-808ba693495c-config-volume" (OuterVolumeSpecName: "config-volume") pod "5efb7422-a464-4daf-991f-808ba693495c" (UID: "5efb7422-a464-4daf-991f-808ba693495c"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.392452 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5efb7422-a464-4daf-991f-808ba693495c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5efb7422-a464-4daf-991f-808ba693495c" (UID: "5efb7422-a464-4daf-991f-808ba693495c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.396243 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5efb7422-a464-4daf-991f-808ba693495c-kube-api-access-srdmv" (OuterVolumeSpecName: "kube-api-access-srdmv") pod "5efb7422-a464-4daf-991f-808ba693495c" (UID: "5efb7422-a464-4daf-991f-808ba693495c"). InnerVolumeSpecName "kube-api-access-srdmv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.467537 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.487684 4747 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5efb7422-a464-4daf-991f-808ba693495c-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.487731 4747 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5efb7422-a464-4daf-991f-808ba693495c-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.487774 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-srdmv\" (UniqueName: \"kubernetes.io/projected/5efb7422-a464-4daf-991f-808ba693495c-kube-api-access-srdmv\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.560330 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8mc8d"] Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.836863 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.837218 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.842292 4747 patch_prober.go:28] interesting pod/console-f9d7485db-csw7c container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.6:8443/health\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.842408 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-csw7c" podUID="2702589d-bd8c-4401-a5d9-2d57c88f33f6" containerName="console" probeResult="failure" output="Get \"https://10.217.0.6:8443/health\": dial tcp 10.217.0.6:8443: connect: connection refused" Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.951279 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"894747bb-0bd0-4382-a444-1702bfcf44e3","Type":"ContainerStarted","Data":"1e3b3a69cf5a96c5fa7bb1b3a9360d3955e701e9cd6be31dec575755b39908d2"} Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.960224 
4747 generic.go:334] "Generic (PLEG): container finished" podID="f8b7e6b7-8b4b-463d-af7f-2d05529d0cce" containerID="34523cb5af1cc6bda26f1c5fe0d762d41c2c13b8dd87ee6cf552a1a369397dab" exitCode=0 Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.960383 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"f8b7e6b7-8b4b-463d-af7f-2d05529d0cce","Type":"ContainerDied","Data":"34523cb5af1cc6bda26f1c5fe0d762d41c2c13b8dd87ee6cf552a1a369397dab"} Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.963992 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-jzmsw" event={"ID":"5efb7422-a464-4daf-991f-808ba693495c","Type":"ContainerDied","Data":"312badfd7ab7dd6b89eccd90295d8152046c5168297298d9d560773ee8177cad"} Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.964016 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="312badfd7ab7dd6b89eccd90295d8152046c5168297298d9d560773ee8177cad" Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.964059 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321655-jzmsw" Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.983653 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" event={"ID":"5de5adaf-4595-446d-9b77-a48824db2dfa","Type":"ContainerStarted","Data":"d810755b1081d5705b83256f16e3da7918be00f96b001f5de206fa0af0d3e379"} Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.983701 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" event={"ID":"5de5adaf-4595-446d-9b77-a48824db2dfa","Type":"ContainerStarted","Data":"5ebfae64562aa19f8c46fad1b968dfc5f6e821b62855d0a3425667f3f8c508c1"} Oct 01 06:19:10 crc kubenswrapper[4747]: I1001 06:19:10.984778 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:11 crc kubenswrapper[4747]: I1001 06:19:11.012987 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" podStartSLOduration=132.012970474 podStartE2EDuration="2m12.012970474s" podCreationTimestamp="2025-10-01 06:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:11.010110838 +0000 UTC m=+152.419767897" watchObservedRunningTime="2025-10-01 06:19:11.012970474 +0000 UTC m=+152.422627513" Oct 01 06:19:11 crc kubenswrapper[4747]: I1001 06:19:11.104915 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-5nq8g" Oct 01 06:19:11 crc kubenswrapper[4747]: I1001 06:19:11.106994 4747 patch_prober.go:28] interesting pod/router-default-5444994796-5nq8g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 06:19:11 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Oct 01 06:19:11 crc kubenswrapper[4747]: [+]process-running ok Oct 01 06:19:11 crc kubenswrapper[4747]: healthz check failed Oct 01 06:19:11 crc kubenswrapper[4747]: I1001 06:19:11.107045 4747 prober.go:107] "Probe 
failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5nq8g" podUID="0b633eab-bed0-436d-ad6d-bd7f315dc172" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 06:19:11 crc kubenswrapper[4747]: I1001 06:19:11.291454 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Oct 01 06:19:11 crc kubenswrapper[4747]: I1001 06:19:11.553532 4747 patch_prober.go:28] interesting pod/downloads-7954f5f757-bxlnn container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Oct 01 06:19:11 crc kubenswrapper[4747]: I1001 06:19:11.553602 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-bxlnn" podUID="e5175887-79c8-46f9-9708-d4d9afea026d" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" Oct 01 06:19:11 crc kubenswrapper[4747]: I1001 06:19:11.553537 4747 patch_prober.go:28] interesting pod/downloads-7954f5f757-bxlnn container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" start-of-body= Oct 01 06:19:11 crc kubenswrapper[4747]: I1001 06:19:11.553693 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-bxlnn" podUID="e5175887-79c8-46f9-9708-d4d9afea026d" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.32:8080/\": dial tcp 10.217.0.32:8080: connect: connection refused" Oct 01 06:19:11 crc kubenswrapper[4747]: I1001 06:19:11.872495 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:11 crc kubenswrapper[4747]: I1001 06:19:11.872560 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:11 crc kubenswrapper[4747]: I1001 06:19:11.879232 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:12 crc kubenswrapper[4747]: I1001 06:19:12.008366 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"894747bb-0bd0-4382-a444-1702bfcf44e3","Type":"ContainerStarted","Data":"fae90d8dc6e927e7254cb4a498397e8aa3108d5212050bcc85653081a78b8baa"} Oct 01 06:19:12 crc kubenswrapper[4747]: I1001 06:19:12.012966 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-qnv2z" Oct 01 06:19:12 crc kubenswrapper[4747]: I1001 06:19:12.026115 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.026083076 podStartE2EDuration="3.026083076s" podCreationTimestamp="2025-10-01 06:19:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:19:12.022189482 +0000 UTC m=+153.431846531" watchObservedRunningTime="2025-10-01 06:19:12.026083076 +0000 UTC m=+153.435740125" Oct 01 06:19:12 crc 
kubenswrapper[4747]: I1001 06:19:12.094475 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:12 crc kubenswrapper[4747]: I1001 06:19:12.094887 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:12 crc kubenswrapper[4747]: I1001 06:19:12.102775 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:12 crc kubenswrapper[4747]: I1001 06:19:12.108094 4747 patch_prober.go:28] interesting pod/router-default-5444994796-5nq8g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 06:19:12 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Oct 01 06:19:12 crc kubenswrapper[4747]: [+]process-running ok Oct 01 06:19:12 crc kubenswrapper[4747]: healthz check failed Oct 01 06:19:12 crc kubenswrapper[4747]: I1001 06:19:12.108156 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5nq8g" podUID="0b633eab-bed0-436d-ad6d-bd7f315dc172" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 06:19:12 crc kubenswrapper[4747]: I1001 06:19:12.456335 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 06:19:12 crc kubenswrapper[4747]: I1001 06:19:12.533218 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f8b7e6b7-8b4b-463d-af7f-2d05529d0cce-kube-api-access\") pod \"f8b7e6b7-8b4b-463d-af7f-2d05529d0cce\" (UID: \"f8b7e6b7-8b4b-463d-af7f-2d05529d0cce\") " Oct 01 06:19:12 crc kubenswrapper[4747]: I1001 06:19:12.533273 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f8b7e6b7-8b4b-463d-af7f-2d05529d0cce-kubelet-dir\") pod \"f8b7e6b7-8b4b-463d-af7f-2d05529d0cce\" (UID: \"f8b7e6b7-8b4b-463d-af7f-2d05529d0cce\") " Oct 01 06:19:12 crc kubenswrapper[4747]: I1001 06:19:12.533526 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f8b7e6b7-8b4b-463d-af7f-2d05529d0cce-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f8b7e6b7-8b4b-463d-af7f-2d05529d0cce" (UID: "f8b7e6b7-8b4b-463d-af7f-2d05529d0cce"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:19:12 crc kubenswrapper[4747]: I1001 06:19:12.553169 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8b7e6b7-8b4b-463d-af7f-2d05529d0cce-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f8b7e6b7-8b4b-463d-af7f-2d05529d0cce" (UID: "f8b7e6b7-8b4b-463d-af7f-2d05529d0cce"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:19:12 crc kubenswrapper[4747]: I1001 06:19:12.634885 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f8b7e6b7-8b4b-463d-af7f-2d05529d0cce-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:12 crc kubenswrapper[4747]: I1001 06:19:12.634933 4747 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f8b7e6b7-8b4b-463d-af7f-2d05529d0cce-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:13 crc kubenswrapper[4747]: I1001 06:19:13.037872 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"f8b7e6b7-8b4b-463d-af7f-2d05529d0cce","Type":"ContainerDied","Data":"d93cc386d8c60fca8607507affda16887afd73503998202bc1ca90b563e2d7a2"} Oct 01 06:19:13 crc kubenswrapper[4747]: I1001 06:19:13.037912 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d93cc386d8c60fca8607507affda16887afd73503998202bc1ca90b563e2d7a2" Oct 01 06:19:13 crc kubenswrapper[4747]: I1001 06:19:13.037972 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 01 06:19:13 crc kubenswrapper[4747]: I1001 06:19:13.043820 4747 generic.go:334] "Generic (PLEG): container finished" podID="894747bb-0bd0-4382-a444-1702bfcf44e3" containerID="fae90d8dc6e927e7254cb4a498397e8aa3108d5212050bcc85653081a78b8baa" exitCode=0 Oct 01 06:19:13 crc kubenswrapper[4747]: I1001 06:19:13.044184 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"894747bb-0bd0-4382-a444-1702bfcf44e3","Type":"ContainerDied","Data":"fae90d8dc6e927e7254cb4a498397e8aa3108d5212050bcc85653081a78b8baa"} Oct 01 06:19:13 crc kubenswrapper[4747]: I1001 06:19:13.071848 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-8nbjz" Oct 01 06:19:13 crc kubenswrapper[4747]: I1001 06:19:13.109435 4747 patch_prober.go:28] interesting pod/router-default-5444994796-5nq8g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 06:19:13 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Oct 01 06:19:13 crc kubenswrapper[4747]: [+]process-running ok Oct 01 06:19:13 crc kubenswrapper[4747]: healthz check failed Oct 01 06:19:13 crc kubenswrapper[4747]: I1001 06:19:13.109826 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5nq8g" podUID="0b633eab-bed0-436d-ad6d-bd7f315dc172" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 06:19:14 crc kubenswrapper[4747]: I1001 06:19:14.106957 4747 patch_prober.go:28] interesting pod/router-default-5444994796-5nq8g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 06:19:14 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Oct 01 06:19:14 crc kubenswrapper[4747]: [+]process-running ok Oct 01 06:19:14 crc kubenswrapper[4747]: healthz check failed Oct 01 06:19:14 crc kubenswrapper[4747]: I1001 06:19:14.107032 4747 prober.go:107] "Probe 
failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5nq8g" podUID="0b633eab-bed0-436d-ad6d-bd7f315dc172" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 06:19:14 crc kubenswrapper[4747]: I1001 06:19:14.489772 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 06:19:14 crc kubenswrapper[4747]: I1001 06:19:14.562465 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/894747bb-0bd0-4382-a444-1702bfcf44e3-kubelet-dir\") pod \"894747bb-0bd0-4382-a444-1702bfcf44e3\" (UID: \"894747bb-0bd0-4382-a444-1702bfcf44e3\") " Oct 01 06:19:14 crc kubenswrapper[4747]: I1001 06:19:14.562580 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/894747bb-0bd0-4382-a444-1702bfcf44e3-kube-api-access\") pod \"894747bb-0bd0-4382-a444-1702bfcf44e3\" (UID: \"894747bb-0bd0-4382-a444-1702bfcf44e3\") " Oct 01 06:19:14 crc kubenswrapper[4747]: I1001 06:19:14.563299 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/894747bb-0bd0-4382-a444-1702bfcf44e3-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "894747bb-0bd0-4382-a444-1702bfcf44e3" (UID: "894747bb-0bd0-4382-a444-1702bfcf44e3"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:19:14 crc kubenswrapper[4747]: I1001 06:19:14.568028 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/894747bb-0bd0-4382-a444-1702bfcf44e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "894747bb-0bd0-4382-a444-1702bfcf44e3" (UID: "894747bb-0bd0-4382-a444-1702bfcf44e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:19:14 crc kubenswrapper[4747]: I1001 06:19:14.663865 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/894747bb-0bd0-4382-a444-1702bfcf44e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:14 crc kubenswrapper[4747]: I1001 06:19:14.663903 4747 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/894747bb-0bd0-4382-a444-1702bfcf44e3-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:15 crc kubenswrapper[4747]: I1001 06:19:15.061784 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 01 06:19:15 crc kubenswrapper[4747]: I1001 06:19:15.061826 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"894747bb-0bd0-4382-a444-1702bfcf44e3","Type":"ContainerDied","Data":"1e3b3a69cf5a96c5fa7bb1b3a9360d3955e701e9cd6be31dec575755b39908d2"} Oct 01 06:19:15 crc kubenswrapper[4747]: I1001 06:19:15.061934 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1e3b3a69cf5a96c5fa7bb1b3a9360d3955e701e9cd6be31dec575755b39908d2" Oct 01 06:19:15 crc kubenswrapper[4747]: I1001 06:19:15.106583 4747 patch_prober.go:28] interesting pod/router-default-5444994796-5nq8g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 06:19:15 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Oct 01 06:19:15 crc kubenswrapper[4747]: [+]process-running ok Oct 01 06:19:15 crc kubenswrapper[4747]: healthz check failed Oct 01 06:19:15 crc kubenswrapper[4747]: I1001 06:19:15.106683 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5nq8g" podUID="0b633eab-bed0-436d-ad6d-bd7f315dc172" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 06:19:16 crc kubenswrapper[4747]: I1001 06:19:16.105584 4747 patch_prober.go:28] interesting pod/router-default-5444994796-5nq8g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 06:19:16 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Oct 01 06:19:16 crc kubenswrapper[4747]: [+]process-running ok Oct 01 06:19:16 crc kubenswrapper[4747]: healthz check failed Oct 01 06:19:16 crc kubenswrapper[4747]: I1001 06:19:16.105919 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5nq8g" podUID="0b633eab-bed0-436d-ad6d-bd7f315dc172" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 06:19:16 crc kubenswrapper[4747]: I1001 06:19:16.856662 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-92b7k" Oct 01 06:19:17 crc kubenswrapper[4747]: I1001 06:19:17.106732 4747 patch_prober.go:28] interesting pod/router-default-5444994796-5nq8g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 01 06:19:17 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Oct 01 06:19:17 crc kubenswrapper[4747]: [+]process-running ok Oct 01 06:19:17 crc kubenswrapper[4747]: healthz check failed Oct 01 06:19:17 crc kubenswrapper[4747]: I1001 06:19:17.106841 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5nq8g" podUID="0b633eab-bed0-436d-ad6d-bd7f315dc172" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 06:19:18 crc kubenswrapper[4747]: I1001 06:19:18.107206 4747 patch_prober.go:28] interesting pod/router-default-5444994796-5nq8g container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" 
start-of-body=[-]backend-http failed: reason withheld Oct 01 06:19:18 crc kubenswrapper[4747]: [-]has-synced failed: reason withheld Oct 01 06:19:18 crc kubenswrapper[4747]: [+]process-running ok Oct 01 06:19:18 crc kubenswrapper[4747]: healthz check failed Oct 01 06:19:18 crc kubenswrapper[4747]: I1001 06:19:18.107514 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5nq8g" podUID="0b633eab-bed0-436d-ad6d-bd7f315dc172" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 01 06:19:19 crc kubenswrapper[4747]: I1001 06:19:19.106724 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-5nq8g" Oct 01 06:19:19 crc kubenswrapper[4747]: I1001 06:19:19.109272 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-5nq8g" Oct 01 06:19:20 crc kubenswrapper[4747]: I1001 06:19:20.837535 4747 patch_prober.go:28] interesting pod/console-f9d7485db-csw7c container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.6:8443/health\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Oct 01 06:19:20 crc kubenswrapper[4747]: I1001 06:19:20.838154 4747 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-csw7c" podUID="2702589d-bd8c-4401-a5d9-2d57c88f33f6" containerName="console" probeResult="failure" output="Get \"https://10.217.0.6:8443/health\": dial tcp 10.217.0.6:8443: connect: connection refused" Oct 01 06:19:21 crc kubenswrapper[4747]: I1001 06:19:21.575895 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-bxlnn" Oct 01 06:19:30 crc kubenswrapper[4747]: I1001 06:19:30.034167 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:19:30 crc kubenswrapper[4747]: I1001 06:19:30.841417 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:19:30 crc kubenswrapper[4747]: I1001 06:19:30.846446 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-csw7c" Oct 01 06:19:35 crc kubenswrapper[4747]: E1001 06:19:35.524582 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 01 06:19:35 crc kubenswrapper[4747]: E1001 06:19:35.525236 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rwtzg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-r2t26_openshift-marketplace(98569525-9a56-4701-9a06-46ac13cb40c3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 06:19:35 crc kubenswrapper[4747]: E1001 06:19:35.526873 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-r2t26" podUID="98569525-9a56-4701-9a06-46ac13cb40c3" Oct 01 06:19:35 crc kubenswrapper[4747]: E1001 06:19:35.562463 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 01 06:19:35 crc kubenswrapper[4747]: E1001 06:19:35.562710 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mpfps,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-nsqw9_openshift-marketplace(f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 06:19:35 crc kubenswrapper[4747]: E1001 06:19:35.563957 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-nsqw9" podUID="f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5" Oct 01 06:19:35 crc kubenswrapper[4747]: I1001 06:19:35.761200 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:19:35 crc kubenswrapper[4747]: I1001 06:19:35.761263 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:19:36 crc kubenswrapper[4747]: E1001 06:19:36.867071 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-r2t26" podUID="98569525-9a56-4701-9a06-46ac13cb40c3" Oct 01 06:19:36 crc kubenswrapper[4747]: E1001 06:19:36.867137 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-nsqw9" podUID="f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5" Oct 01 06:19:36 crc kubenswrapper[4747]: E1001 
06:19:36.958950 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 01 06:19:36 crc kubenswrapper[4747]: E1001 06:19:36.959079 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k5nhk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-899r6_openshift-marketplace(fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 01 06:19:36 crc kubenswrapper[4747]: E1001 06:19:36.960237 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-899r6" podUID="fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e" Oct 01 06:19:37 crc kubenswrapper[4747]: I1001 06:19:37.243394 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w8ltr" event={"ID":"8e6ee5ef-246c-41dd-a003-d590e1e1119b","Type":"ContainerStarted","Data":"c135cc97cab9212db9e9f0e07d6df3679abef8c296638a6725a5812043e4a364"} Oct 01 06:19:37 crc kubenswrapper[4747]: I1001 06:19:37.247978 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ss76n" event={"ID":"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c","Type":"ContainerStarted","Data":"2b1ab3428cd0a3dbc720807e410f0d51be586e4babd98401d9105c6ce29a942d"} Oct 01 06:19:37 crc kubenswrapper[4747]: I1001 06:19:37.250308 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2gdmv" event={"ID":"055f5415-eb0a-47cd-9f51-9fe5751b890a","Type":"ContainerStarted","Data":"8969211a4c3e5621983645aa5152f280b1addca74ba0dbd0850ee7e96f12e122"} Oct 
01 06:19:37 crc kubenswrapper[4747]: I1001 06:19:37.254503 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hnkg5" event={"ID":"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c","Type":"ContainerStarted","Data":"9489eceb4f77f2d451beaa50e2c9473a1ef1b46829c95a9fb390174e47ff4b36"} Oct 01 06:19:37 crc kubenswrapper[4747]: E1001 06:19:37.275019 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-899r6" podUID="fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e" Oct 01 06:19:38 crc kubenswrapper[4747]: I1001 06:19:38.266069 4747 generic.go:334] "Generic (PLEG): container finished" podID="8e6ee5ef-246c-41dd-a003-d590e1e1119b" containerID="c135cc97cab9212db9e9f0e07d6df3679abef8c296638a6725a5812043e4a364" exitCode=0 Oct 01 06:19:38 crc kubenswrapper[4747]: I1001 06:19:38.266462 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w8ltr" event={"ID":"8e6ee5ef-246c-41dd-a003-d590e1e1119b","Type":"ContainerDied","Data":"c135cc97cab9212db9e9f0e07d6df3679abef8c296638a6725a5812043e4a364"} Oct 01 06:19:38 crc kubenswrapper[4747]: I1001 06:19:38.280481 4747 generic.go:334] "Generic (PLEG): container finished" podID="b59cfffc-7d07-436d-bd34-6c7e3aa0da3c" containerID="2b1ab3428cd0a3dbc720807e410f0d51be586e4babd98401d9105c6ce29a942d" exitCode=0 Oct 01 06:19:38 crc kubenswrapper[4747]: I1001 06:19:38.280844 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ss76n" event={"ID":"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c","Type":"ContainerDied","Data":"2b1ab3428cd0a3dbc720807e410f0d51be586e4babd98401d9105c6ce29a942d"} Oct 01 06:19:38 crc kubenswrapper[4747]: I1001 06:19:38.283668 4747 generic.go:334] "Generic (PLEG): container finished" podID="055f5415-eb0a-47cd-9f51-9fe5751b890a" containerID="8969211a4c3e5621983645aa5152f280b1addca74ba0dbd0850ee7e96f12e122" exitCode=0 Oct 01 06:19:38 crc kubenswrapper[4747]: I1001 06:19:38.283738 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2gdmv" event={"ID":"055f5415-eb0a-47cd-9f51-9fe5751b890a","Type":"ContainerDied","Data":"8969211a4c3e5621983645aa5152f280b1addca74ba0dbd0850ee7e96f12e122"} Oct 01 06:19:38 crc kubenswrapper[4747]: I1001 06:19:38.290444 4747 generic.go:334] "Generic (PLEG): container finished" podID="0ecbf263-d2a7-4f34-8623-7806e8cacbae" containerID="2c22d10c7dd421a67d1ccb4a70abae66a0934bc5581adbeedbfbaead11ae7525" exitCode=0 Oct 01 06:19:38 crc kubenswrapper[4747]: I1001 06:19:38.290573 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rkcl6" event={"ID":"0ecbf263-d2a7-4f34-8623-7806e8cacbae","Type":"ContainerDied","Data":"2c22d10c7dd421a67d1ccb4a70abae66a0934bc5581adbeedbfbaead11ae7525"} Oct 01 06:19:38 crc kubenswrapper[4747]: I1001 06:19:38.306021 4747 generic.go:334] "Generic (PLEG): container finished" podID="ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c" containerID="9489eceb4f77f2d451beaa50e2c9473a1ef1b46829c95a9fb390174e47ff4b36" exitCode=0 Oct 01 06:19:38 crc kubenswrapper[4747]: I1001 06:19:38.306080 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hnkg5" 
event={"ID":"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c","Type":"ContainerDied","Data":"9489eceb4f77f2d451beaa50e2c9473a1ef1b46829c95a9fb390174e47ff4b36"} Oct 01 06:19:39 crc kubenswrapper[4747]: I1001 06:19:39.315971 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w8ltr" event={"ID":"8e6ee5ef-246c-41dd-a003-d590e1e1119b","Type":"ContainerStarted","Data":"9e07d29983b3227d7573f15355d315b26fea92fd5e9f9c56823e94cb257efa71"} Oct 01 06:19:39 crc kubenswrapper[4747]: I1001 06:19:39.318739 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ss76n" event={"ID":"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c","Type":"ContainerStarted","Data":"72c3f755c3d6f0795a7235569f8e5f58bd1b01711b86efe4b74902229da1402a"} Oct 01 06:19:39 crc kubenswrapper[4747]: I1001 06:19:39.321346 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2gdmv" event={"ID":"055f5415-eb0a-47cd-9f51-9fe5751b890a","Type":"ContainerStarted","Data":"04b86719f4ca04daa15f0b3b2d1ab4c2384258132f9b7397e83e4da7e4cd8dbe"} Oct 01 06:19:39 crc kubenswrapper[4747]: I1001 06:19:39.323224 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rkcl6" event={"ID":"0ecbf263-d2a7-4f34-8623-7806e8cacbae","Type":"ContainerStarted","Data":"232b7af75e46586e53d4fd8d31453e1fbc7400613fc8cfc02841e94c8e97201b"} Oct 01 06:19:39 crc kubenswrapper[4747]: I1001 06:19:39.325645 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hnkg5" event={"ID":"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c","Type":"ContainerStarted","Data":"d836a687341c71a8dad21ba46216ff2d5d578b7472645a3c729e64d19012b5ed"} Oct 01 06:19:39 crc kubenswrapper[4747]: I1001 06:19:39.335777 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-w8ltr" podStartSLOduration=3.839892745 podStartE2EDuration="35.335759717s" podCreationTimestamp="2025-10-01 06:19:04 +0000 UTC" firstStartedPulling="2025-10-01 06:19:07.461301875 +0000 UTC m=+148.870958924" lastFinishedPulling="2025-10-01 06:19:38.957168857 +0000 UTC m=+180.366825896" observedRunningTime="2025-10-01 06:19:39.332446218 +0000 UTC m=+180.742103267" watchObservedRunningTime="2025-10-01 06:19:39.335759717 +0000 UTC m=+180.745416766" Oct 01 06:19:39 crc kubenswrapper[4747]: I1001 06:19:39.360329 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rkcl6" podStartSLOduration=3.560203066 podStartE2EDuration="32.360306495s" podCreationTimestamp="2025-10-01 06:19:07 +0000 UTC" firstStartedPulling="2025-10-01 06:19:09.928606563 +0000 UTC m=+151.338263602" lastFinishedPulling="2025-10-01 06:19:38.728709982 +0000 UTC m=+180.138367031" observedRunningTime="2025-10-01 06:19:39.352918087 +0000 UTC m=+180.762575136" watchObservedRunningTime="2025-10-01 06:19:39.360306495 +0000 UTC m=+180.769963544" Oct 01 06:19:39 crc kubenswrapper[4747]: I1001 06:19:39.374066 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-ss76n" podStartSLOduration=4.233185928 podStartE2EDuration="35.374050933s" podCreationTimestamp="2025-10-01 06:19:04 +0000 UTC" firstStartedPulling="2025-10-01 06:19:07.518650672 +0000 UTC m=+148.928307721" lastFinishedPulling="2025-10-01 06:19:38.659515677 +0000 UTC m=+180.069172726" observedRunningTime="2025-10-01 06:19:39.371741292 +0000 UTC 
m=+180.781398341" watchObservedRunningTime="2025-10-01 06:19:39.374050933 +0000 UTC m=+180.783707982" Oct 01 06:19:39 crc kubenswrapper[4747]: I1001 06:19:39.389395 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2gdmv" podStartSLOduration=2.475243668 podStartE2EDuration="32.389377594s" podCreationTimestamp="2025-10-01 06:19:07 +0000 UTC" firstStartedPulling="2025-10-01 06:19:08.871736678 +0000 UTC m=+150.281393727" lastFinishedPulling="2025-10-01 06:19:38.785870604 +0000 UTC m=+180.195527653" observedRunningTime="2025-10-01 06:19:39.38623828 +0000 UTC m=+180.795895319" watchObservedRunningTime="2025-10-01 06:19:39.389377594 +0000 UTC m=+180.799034643" Oct 01 06:19:42 crc kubenswrapper[4747]: I1001 06:19:42.093821 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-q6jpz" Oct 01 06:19:42 crc kubenswrapper[4747]: I1001 06:19:42.123970 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hnkg5" podStartSLOduration=6.682968087 podStartE2EDuration="38.123952158s" podCreationTimestamp="2025-10-01 06:19:04 +0000 UTC" firstStartedPulling="2025-10-01 06:19:07.375644838 +0000 UTC m=+148.785301897" lastFinishedPulling="2025-10-01 06:19:38.816628919 +0000 UTC m=+180.226285968" observedRunningTime="2025-10-01 06:19:39.409606077 +0000 UTC m=+180.819263126" watchObservedRunningTime="2025-10-01 06:19:42.123952158 +0000 UTC m=+183.533609217" Oct 01 06:19:44 crc kubenswrapper[4747]: I1001 06:19:44.716400 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-w8ltr" Oct 01 06:19:44 crc kubenswrapper[4747]: I1001 06:19:44.716861 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-w8ltr" Oct 01 06:19:44 crc kubenswrapper[4747]: I1001 06:19:44.932878 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-ss76n" Oct 01 06:19:44 crc kubenswrapper[4747]: I1001 06:19:44.932984 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-ss76n" Oct 01 06:19:44 crc kubenswrapper[4747]: I1001 06:19:44.969245 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-w8ltr" Oct 01 06:19:44 crc kubenswrapper[4747]: I1001 06:19:44.998871 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-ss76n" Oct 01 06:19:45 crc kubenswrapper[4747]: I1001 06:19:45.087988 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hnkg5" Oct 01 06:19:45 crc kubenswrapper[4747]: I1001 06:19:45.088113 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hnkg5" Oct 01 06:19:45 crc kubenswrapper[4747]: I1001 06:19:45.157919 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hnkg5" Oct 01 06:19:45 crc kubenswrapper[4747]: I1001 06:19:45.440556 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hnkg5" Oct 01 06:19:45 crc kubenswrapper[4747]: I1001 06:19:45.442343 4747 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-w8ltr" Oct 01 06:19:45 crc kubenswrapper[4747]: I1001 06:19:45.464375 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-ss76n" Oct 01 06:19:46 crc kubenswrapper[4747]: I1001 06:19:46.119905 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ss76n"] Oct 01 06:19:46 crc kubenswrapper[4747]: I1001 06:19:46.832413 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 01 06:19:47 crc kubenswrapper[4747]: I1001 06:19:47.403645 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-ss76n" podUID="b59cfffc-7d07-436d-bd34-6c7e3aa0da3c" containerName="registry-server" containerID="cri-o://72c3f755c3d6f0795a7235569f8e5f58bd1b01711b86efe4b74902229da1402a" gracePeriod=2 Oct 01 06:19:47 crc kubenswrapper[4747]: I1001 06:19:47.518535 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hnkg5"] Oct 01 06:19:47 crc kubenswrapper[4747]: I1001 06:19:47.518812 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hnkg5" podUID="ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c" containerName="registry-server" containerID="cri-o://d836a687341c71a8dad21ba46216ff2d5d578b7472645a3c729e64d19012b5ed" gracePeriod=2 Oct 01 06:19:47 crc kubenswrapper[4747]: I1001 06:19:47.718138 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2gdmv" Oct 01 06:19:47 crc kubenswrapper[4747]: I1001 06:19:47.718207 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2gdmv" Oct 01 06:19:47 crc kubenswrapper[4747]: I1001 06:19:47.791029 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2gdmv" Oct 01 06:19:48 crc kubenswrapper[4747]: I1001 06:19:48.003953 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rkcl6" Oct 01 06:19:48 crc kubenswrapper[4747]: I1001 06:19:48.004024 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rkcl6" Oct 01 06:19:48 crc kubenswrapper[4747]: I1001 06:19:48.082548 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rkcl6" Oct 01 06:19:48 crc kubenswrapper[4747]: I1001 06:19:48.420400 4747 generic.go:334] "Generic (PLEG): container finished" podID="ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c" containerID="d836a687341c71a8dad21ba46216ff2d5d578b7472645a3c729e64d19012b5ed" exitCode=0 Oct 01 06:19:48 crc kubenswrapper[4747]: I1001 06:19:48.420527 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hnkg5" event={"ID":"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c","Type":"ContainerDied","Data":"d836a687341c71a8dad21ba46216ff2d5d578b7472645a3c729e64d19012b5ed"} Oct 01 06:19:48 crc kubenswrapper[4747]: I1001 06:19:48.423099 4747 generic.go:334] "Generic (PLEG): container finished" podID="b59cfffc-7d07-436d-bd34-6c7e3aa0da3c" containerID="72c3f755c3d6f0795a7235569f8e5f58bd1b01711b86efe4b74902229da1402a" 
exitCode=0 Oct 01 06:19:48 crc kubenswrapper[4747]: I1001 06:19:48.423151 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ss76n" event={"ID":"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c","Type":"ContainerDied","Data":"72c3f755c3d6f0795a7235569f8e5f58bd1b01711b86efe4b74902229da1402a"} Oct 01 06:19:48 crc kubenswrapper[4747]: I1001 06:19:48.468605 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rkcl6" Oct 01 06:19:48 crc kubenswrapper[4747]: I1001 06:19:48.474469 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2gdmv" Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.542917 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ss76n" Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.556915 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hnkg5" Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.616720 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c-catalog-content\") pod \"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c\" (UID: \"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c\") " Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.616800 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b59cfffc-7d07-436d-bd34-6c7e3aa0da3c-catalog-content\") pod \"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c\" (UID: \"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c\") " Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.616841 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b59cfffc-7d07-436d-bd34-6c7e3aa0da3c-utilities\") pod \"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c\" (UID: \"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c\") " Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.616869 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzhmj\" (UniqueName: \"kubernetes.io/projected/b59cfffc-7d07-436d-bd34-6c7e3aa0da3c-kube-api-access-fzhmj\") pod \"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c\" (UID: \"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c\") " Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.616909 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c-utilities\") pod \"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c\" (UID: \"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c\") " Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.618059 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c-utilities" (OuterVolumeSpecName: "utilities") pod "ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c" (UID: "ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.619007 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b59cfffc-7d07-436d-bd34-6c7e3aa0da3c-utilities" (OuterVolumeSpecName: "utilities") pod "b59cfffc-7d07-436d-bd34-6c7e3aa0da3c" (UID: "b59cfffc-7d07-436d-bd34-6c7e3aa0da3c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.632809 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b59cfffc-7d07-436d-bd34-6c7e3aa0da3c-kube-api-access-fzhmj" (OuterVolumeSpecName: "kube-api-access-fzhmj") pod "b59cfffc-7d07-436d-bd34-6c7e3aa0da3c" (UID: "b59cfffc-7d07-436d-bd34-6c7e3aa0da3c"). InnerVolumeSpecName "kube-api-access-fzhmj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.677149 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b59cfffc-7d07-436d-bd34-6c7e3aa0da3c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b59cfffc-7d07-436d-bd34-6c7e3aa0da3c" (UID: "b59cfffc-7d07-436d-bd34-6c7e3aa0da3c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.692579 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c" (UID: "ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.717778 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwb6w\" (UniqueName: \"kubernetes.io/projected/ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c-kube-api-access-hwb6w\") pod \"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c\" (UID: \"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c\") " Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.718040 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.718059 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b59cfffc-7d07-436d-bd34-6c7e3aa0da3c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.718070 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b59cfffc-7d07-436d-bd34-6c7e3aa0da3c-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.718080 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzhmj\" (UniqueName: \"kubernetes.io/projected/b59cfffc-7d07-436d-bd34-6c7e3aa0da3c-kube-api-access-fzhmj\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.718090 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.720979 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c-kube-api-access-hwb6w" (OuterVolumeSpecName: "kube-api-access-hwb6w") pod "ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c" (UID: "ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c"). InnerVolumeSpecName "kube-api-access-hwb6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.819541 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwb6w\" (UniqueName: \"kubernetes.io/projected/ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c-kube-api-access-hwb6w\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.917854 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rkcl6"] Oct 01 06:19:51 crc kubenswrapper[4747]: I1001 06:19:51.918126 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rkcl6" podUID="0ecbf263-d2a7-4f34-8623-7806e8cacbae" containerName="registry-server" containerID="cri-o://232b7af75e46586e53d4fd8d31453e1fbc7400613fc8cfc02841e94c8e97201b" gracePeriod=2 Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.228240 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rkcl6" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.426119 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ecbf263-d2a7-4f34-8623-7806e8cacbae-utilities\") pod \"0ecbf263-d2a7-4f34-8623-7806e8cacbae\" (UID: \"0ecbf263-d2a7-4f34-8623-7806e8cacbae\") " Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.426266 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ecbf263-d2a7-4f34-8623-7806e8cacbae-catalog-content\") pod \"0ecbf263-d2a7-4f34-8623-7806e8cacbae\" (UID: \"0ecbf263-d2a7-4f34-8623-7806e8cacbae\") " Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.426320 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prsbr\" (UniqueName: \"kubernetes.io/projected/0ecbf263-d2a7-4f34-8623-7806e8cacbae-kube-api-access-prsbr\") pod \"0ecbf263-d2a7-4f34-8623-7806e8cacbae\" (UID: \"0ecbf263-d2a7-4f34-8623-7806e8cacbae\") " Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.427492 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ecbf263-d2a7-4f34-8623-7806e8cacbae-utilities" (OuterVolumeSpecName: "utilities") pod "0ecbf263-d2a7-4f34-8623-7806e8cacbae" (UID: "0ecbf263-d2a7-4f34-8623-7806e8cacbae"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.434979 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ecbf263-d2a7-4f34-8623-7806e8cacbae-kube-api-access-prsbr" (OuterVolumeSpecName: "kube-api-access-prsbr") pod "0ecbf263-d2a7-4f34-8623-7806e8cacbae" (UID: "0ecbf263-d2a7-4f34-8623-7806e8cacbae"). InnerVolumeSpecName "kube-api-access-prsbr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.469549 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hnkg5" event={"ID":"ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c","Type":"ContainerDied","Data":"addff842755337d677f3d0282a9f930f7d3ca3ca0ec625cb5ba327191e89001a"} Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.469607 4747 scope.go:117] "RemoveContainer" containerID="d836a687341c71a8dad21ba46216ff2d5d578b7472645a3c729e64d19012b5ed" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.469599 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hnkg5" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.471269 4747 generic.go:334] "Generic (PLEG): container finished" podID="f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5" containerID="0cb90376bb751d8aecfadb205a2aa32cb57903e494fcb179821192cf746fa3be" exitCode=0 Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.471338 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nsqw9" event={"ID":"f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5","Type":"ContainerDied","Data":"0cb90376bb751d8aecfadb205a2aa32cb57903e494fcb179821192cf746fa3be"} Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.474606 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-ss76n" event={"ID":"b59cfffc-7d07-436d-bd34-6c7e3aa0da3c","Type":"ContainerDied","Data":"fdd4c0125d9734726991fd1b84f42acbdc66436eecf50a2c2bb5f5e244efa7b7"} Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.474672 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-ss76n" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.483695 4747 generic.go:334] "Generic (PLEG): container finished" podID="0ecbf263-d2a7-4f34-8623-7806e8cacbae" containerID="232b7af75e46586e53d4fd8d31453e1fbc7400613fc8cfc02841e94c8e97201b" exitCode=0 Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.484064 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rkcl6" event={"ID":"0ecbf263-d2a7-4f34-8623-7806e8cacbae","Type":"ContainerDied","Data":"232b7af75e46586e53d4fd8d31453e1fbc7400613fc8cfc02841e94c8e97201b"} Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.484415 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rkcl6" event={"ID":"0ecbf263-d2a7-4f34-8623-7806e8cacbae","Type":"ContainerDied","Data":"be0c23f0668f620442b18d56811efedaf217814d6ad7a87d1d0288e8a4a1a4e0"} Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.484305 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rkcl6" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.523525 4747 scope.go:117] "RemoveContainer" containerID="9489eceb4f77f2d451beaa50e2c9473a1ef1b46829c95a9fb390174e47ff4b36" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.532829 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-ss76n"] Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.534985 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prsbr\" (UniqueName: \"kubernetes.io/projected/0ecbf263-d2a7-4f34-8623-7806e8cacbae-kube-api-access-prsbr\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.535028 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0ecbf263-d2a7-4f34-8623-7806e8cacbae-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.538163 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-ss76n"] Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.552922 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hnkg5"] Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.559256 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hnkg5"] Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.562927 4747 scope.go:117] "RemoveContainer" containerID="3b007c13927366562a6344f72bd0d0e07e21e5b870c77fa230d49840aa46f18a" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.584196 4747 scope.go:117] "RemoveContainer" containerID="72c3f755c3d6f0795a7235569f8e5f58bd1b01711b86efe4b74902229da1402a" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.591373 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ecbf263-d2a7-4f34-8623-7806e8cacbae-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0ecbf263-d2a7-4f34-8623-7806e8cacbae" (UID: "0ecbf263-d2a7-4f34-8623-7806e8cacbae"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.612973 4747 scope.go:117] "RemoveContainer" containerID="2b1ab3428cd0a3dbc720807e410f0d51be586e4babd98401d9105c6ce29a942d" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.629896 4747 scope.go:117] "RemoveContainer" containerID="3e245d60333a68499d2a44cbda22237b61d189f0a516a0a077d48c1cff8c85a6" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.635980 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0ecbf263-d2a7-4f34-8623-7806e8cacbae-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.652903 4747 scope.go:117] "RemoveContainer" containerID="232b7af75e46586e53d4fd8d31453e1fbc7400613fc8cfc02841e94c8e97201b" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.666517 4747 scope.go:117] "RemoveContainer" containerID="2c22d10c7dd421a67d1ccb4a70abae66a0934bc5581adbeedbfbaead11ae7525" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.681787 4747 scope.go:117] "RemoveContainer" containerID="eff9ca38405af0e70b3c4b53cf0ca0fe9777bb1acd02d8be957825f2c4dd4181" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.696492 4747 scope.go:117] "RemoveContainer" containerID="232b7af75e46586e53d4fd8d31453e1fbc7400613fc8cfc02841e94c8e97201b" Oct 01 06:19:52 crc kubenswrapper[4747]: E1001 06:19:52.697105 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"232b7af75e46586e53d4fd8d31453e1fbc7400613fc8cfc02841e94c8e97201b\": container with ID starting with 232b7af75e46586e53d4fd8d31453e1fbc7400613fc8cfc02841e94c8e97201b not found: ID does not exist" containerID="232b7af75e46586e53d4fd8d31453e1fbc7400613fc8cfc02841e94c8e97201b" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.697165 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"232b7af75e46586e53d4fd8d31453e1fbc7400613fc8cfc02841e94c8e97201b"} err="failed to get container status \"232b7af75e46586e53d4fd8d31453e1fbc7400613fc8cfc02841e94c8e97201b\": rpc error: code = NotFound desc = could not find container \"232b7af75e46586e53d4fd8d31453e1fbc7400613fc8cfc02841e94c8e97201b\": container with ID starting with 232b7af75e46586e53d4fd8d31453e1fbc7400613fc8cfc02841e94c8e97201b not found: ID does not exist" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.697216 4747 scope.go:117] "RemoveContainer" containerID="2c22d10c7dd421a67d1ccb4a70abae66a0934bc5581adbeedbfbaead11ae7525" Oct 01 06:19:52 crc kubenswrapper[4747]: E1001 06:19:52.697597 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c22d10c7dd421a67d1ccb4a70abae66a0934bc5581adbeedbfbaead11ae7525\": container with ID starting with 2c22d10c7dd421a67d1ccb4a70abae66a0934bc5581adbeedbfbaead11ae7525 not found: ID does not exist" containerID="2c22d10c7dd421a67d1ccb4a70abae66a0934bc5581adbeedbfbaead11ae7525" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.697631 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c22d10c7dd421a67d1ccb4a70abae66a0934bc5581adbeedbfbaead11ae7525"} err="failed to get container status \"2c22d10c7dd421a67d1ccb4a70abae66a0934bc5581adbeedbfbaead11ae7525\": rpc error: code = NotFound desc = could not find container 
\"2c22d10c7dd421a67d1ccb4a70abae66a0934bc5581adbeedbfbaead11ae7525\": container with ID starting with 2c22d10c7dd421a67d1ccb4a70abae66a0934bc5581adbeedbfbaead11ae7525 not found: ID does not exist" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.697691 4747 scope.go:117] "RemoveContainer" containerID="eff9ca38405af0e70b3c4b53cf0ca0fe9777bb1acd02d8be957825f2c4dd4181" Oct 01 06:19:52 crc kubenswrapper[4747]: E1001 06:19:52.698078 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eff9ca38405af0e70b3c4b53cf0ca0fe9777bb1acd02d8be957825f2c4dd4181\": container with ID starting with eff9ca38405af0e70b3c4b53cf0ca0fe9777bb1acd02d8be957825f2c4dd4181 not found: ID does not exist" containerID="eff9ca38405af0e70b3c4b53cf0ca0fe9777bb1acd02d8be957825f2c4dd4181" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.698108 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eff9ca38405af0e70b3c4b53cf0ca0fe9777bb1acd02d8be957825f2c4dd4181"} err="failed to get container status \"eff9ca38405af0e70b3c4b53cf0ca0fe9777bb1acd02d8be957825f2c4dd4181\": rpc error: code = NotFound desc = could not find container \"eff9ca38405af0e70b3c4b53cf0ca0fe9777bb1acd02d8be957825f2c4dd4181\": container with ID starting with eff9ca38405af0e70b3c4b53cf0ca0fe9777bb1acd02d8be957825f2c4dd4181 not found: ID does not exist" Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.842520 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rkcl6"] Oct 01 06:19:52 crc kubenswrapper[4747]: I1001 06:19:52.851623 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rkcl6"] Oct 01 06:19:53 crc kubenswrapper[4747]: I1001 06:19:53.286740 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ecbf263-d2a7-4f34-8623-7806e8cacbae" path="/var/lib/kubelet/pods/0ecbf263-d2a7-4f34-8623-7806e8cacbae/volumes" Oct 01 06:19:53 crc kubenswrapper[4747]: I1001 06:19:53.288146 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b59cfffc-7d07-436d-bd34-6c7e3aa0da3c" path="/var/lib/kubelet/pods/b59cfffc-7d07-436d-bd34-6c7e3aa0da3c/volumes" Oct 01 06:19:53 crc kubenswrapper[4747]: I1001 06:19:53.288873 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c" path="/var/lib/kubelet/pods/ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c/volumes" Oct 01 06:19:53 crc kubenswrapper[4747]: I1001 06:19:53.502088 4747 generic.go:334] "Generic (PLEG): container finished" podID="fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e" containerID="0f15ca5fcb5f9ab06bdf5d24dff84adf36a6b98070ffae4a9cd345341c9fd333" exitCode=0 Oct 01 06:19:53 crc kubenswrapper[4747]: I1001 06:19:53.502162 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-899r6" event={"ID":"fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e","Type":"ContainerDied","Data":"0f15ca5fcb5f9ab06bdf5d24dff84adf36a6b98070ffae4a9cd345341c9fd333"} Oct 01 06:19:53 crc kubenswrapper[4747]: I1001 06:19:53.514572 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nsqw9" event={"ID":"f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5","Type":"ContainerStarted","Data":"ee6a10e74f08ed387a11306cc59459f5aa1d8c615eee8cf5ec0ab8ee6227e1de"} Oct 01 06:19:53 crc kubenswrapper[4747]: I1001 06:19:53.521304 4747 generic.go:334] "Generic (PLEG): container finished" 
podID="98569525-9a56-4701-9a06-46ac13cb40c3" containerID="9be7941308b8b3428d223b395a9b1758f9ecd7d8f4ea9a5446bee5a6e585eb82" exitCode=0 Oct 01 06:19:53 crc kubenswrapper[4747]: I1001 06:19:53.521350 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r2t26" event={"ID":"98569525-9a56-4701-9a06-46ac13cb40c3","Type":"ContainerDied","Data":"9be7941308b8b3428d223b395a9b1758f9ecd7d8f4ea9a5446bee5a6e585eb82"} Oct 01 06:19:53 crc kubenswrapper[4747]: I1001 06:19:53.561884 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nsqw9" podStartSLOduration=3.585331509 podStartE2EDuration="47.561860399s" podCreationTimestamp="2025-10-01 06:19:06 +0000 UTC" firstStartedPulling="2025-10-01 06:19:08.913681853 +0000 UTC m=+150.323338902" lastFinishedPulling="2025-10-01 06:19:52.890210733 +0000 UTC m=+194.299867792" observedRunningTime="2025-10-01 06:19:53.55664827 +0000 UTC m=+194.966305319" watchObservedRunningTime="2025-10-01 06:19:53.561860399 +0000 UTC m=+194.971517478" Oct 01 06:19:54 crc kubenswrapper[4747]: I1001 06:19:54.528393 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r2t26" event={"ID":"98569525-9a56-4701-9a06-46ac13cb40c3","Type":"ContainerStarted","Data":"e739e6f4df38024e25ed84ed75033f91085560c41ac188df1d6bef091d5f1361"} Oct 01 06:19:54 crc kubenswrapper[4747]: I1001 06:19:54.529906 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-899r6" event={"ID":"fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e","Type":"ContainerStarted","Data":"d877cad5c312d1ef40c14cfb2c09e6d4141c01689adad7c6fdf7a9e0819d75bb"} Oct 01 06:19:54 crc kubenswrapper[4747]: I1001 06:19:54.563898 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-r2t26" podStartSLOduration=3.505256743 podStartE2EDuration="48.563879853s" podCreationTimestamp="2025-10-01 06:19:06 +0000 UTC" firstStartedPulling="2025-10-01 06:19:08.87851189 +0000 UTC m=+150.288168939" lastFinishedPulling="2025-10-01 06:19:53.93713496 +0000 UTC m=+195.346792049" observedRunningTime="2025-10-01 06:19:54.54922818 +0000 UTC m=+195.958885229" watchObservedRunningTime="2025-10-01 06:19:54.563879853 +0000 UTC m=+195.973536902" Oct 01 06:19:54 crc kubenswrapper[4747]: I1001 06:19:54.565943 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-899r6" podStartSLOduration=4.238773177 podStartE2EDuration="50.565937358s" podCreationTimestamp="2025-10-01 06:19:04 +0000 UTC" firstStartedPulling="2025-10-01 06:19:07.682167395 +0000 UTC m=+149.091824444" lastFinishedPulling="2025-10-01 06:19:54.009331576 +0000 UTC m=+195.418988625" observedRunningTime="2025-10-01 06:19:54.560704068 +0000 UTC m=+195.970361117" watchObservedRunningTime="2025-10-01 06:19:54.565937358 +0000 UTC m=+195.975594407" Oct 01 06:19:56 crc kubenswrapper[4747]: I1001 06:19:56.698581 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-r2t26" Oct 01 06:19:56 crc kubenswrapper[4747]: I1001 06:19:56.700294 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-r2t26" Oct 01 06:19:56 crc kubenswrapper[4747]: I1001 06:19:56.757595 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-marketplace-r2t26" Oct 01 06:19:57 crc kubenswrapper[4747]: I1001 06:19:57.039052 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nsqw9" Oct 01 06:19:57 crc kubenswrapper[4747]: I1001 06:19:57.039124 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nsqw9" Oct 01 06:19:57 crc kubenswrapper[4747]: I1001 06:19:57.084269 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nsqw9" Oct 01 06:20:04 crc kubenswrapper[4747]: I1001 06:20:04.501458 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-899r6" Oct 01 06:20:04 crc kubenswrapper[4747]: I1001 06:20:04.502206 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-899r6" Oct 01 06:20:04 crc kubenswrapper[4747]: I1001 06:20:04.547576 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-899r6" Oct 01 06:20:04 crc kubenswrapper[4747]: I1001 06:20:04.626218 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-899r6" Oct 01 06:20:05 crc kubenswrapper[4747]: I1001 06:20:05.761410 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:20:05 crc kubenswrapper[4747]: I1001 06:20:05.761495 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:20:05 crc kubenswrapper[4747]: I1001 06:20:05.761564 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:20:05 crc kubenswrapper[4747]: I1001 06:20:05.762493 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721"} pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 06:20:05 crc kubenswrapper[4747]: I1001 06:20:05.762580 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" containerID="cri-o://f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721" gracePeriod=600 Oct 01 06:20:06 crc kubenswrapper[4747]: I1001 06:20:06.607129 4747 generic.go:334] "Generic (PLEG): container finished" podID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerID="f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721" exitCode=0 Oct 01 06:20:06 crc kubenswrapper[4747]: I1001 06:20:06.607275 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" event={"ID":"90df9e29-7482-4ab7-84c6-f3029df17a0d","Type":"ContainerDied","Data":"f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721"} Oct 01 06:20:06 crc kubenswrapper[4747]: I1001 06:20:06.608700 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" event={"ID":"90df9e29-7482-4ab7-84c6-f3029df17a0d","Type":"ContainerStarted","Data":"b376cd219b804446b36aa80999cb1cd10015c3499940e5191b5df36c2cb92f37"} Oct 01 06:20:06 crc kubenswrapper[4747]: I1001 06:20:06.773537 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-r2t26" Oct 01 06:20:07 crc kubenswrapper[4747]: I1001 06:20:07.105806 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nsqw9" Oct 01 06:20:07 crc kubenswrapper[4747]: I1001 06:20:07.746691 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nsqw9"] Oct 01 06:20:07 crc kubenswrapper[4747]: I1001 06:20:07.747366 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nsqw9" podUID="f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5" containerName="registry-server" containerID="cri-o://ee6a10e74f08ed387a11306cc59459f5aa1d8c615eee8cf5ec0ab8ee6227e1de" gracePeriod=2 Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.109492 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nsqw9" Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.228067 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5-catalog-content\") pod \"f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5\" (UID: \"f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5\") " Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.228170 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mpfps\" (UniqueName: \"kubernetes.io/projected/f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5-kube-api-access-mpfps\") pod \"f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5\" (UID: \"f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5\") " Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.228235 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5-utilities\") pod \"f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5\" (UID: \"f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5\") " Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.229903 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5-utilities" (OuterVolumeSpecName: "utilities") pod "f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5" (UID: "f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.231393 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.237855 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5-kube-api-access-mpfps" (OuterVolumeSpecName: "kube-api-access-mpfps") pod "f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5" (UID: "f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5"). InnerVolumeSpecName "kube-api-access-mpfps". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.243537 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5" (UID: "f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.333194 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.333248 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mpfps\" (UniqueName: \"kubernetes.io/projected/f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5-kube-api-access-mpfps\") on node \"crc\" DevicePath \"\"" Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.623914 4747 generic.go:334] "Generic (PLEG): container finished" podID="f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5" containerID="ee6a10e74f08ed387a11306cc59459f5aa1d8c615eee8cf5ec0ab8ee6227e1de" exitCode=0 Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.623976 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nsqw9" event={"ID":"f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5","Type":"ContainerDied","Data":"ee6a10e74f08ed387a11306cc59459f5aa1d8c615eee8cf5ec0ab8ee6227e1de"} Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.624009 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nsqw9" Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.624036 4747 scope.go:117] "RemoveContainer" containerID="ee6a10e74f08ed387a11306cc59459f5aa1d8c615eee8cf5ec0ab8ee6227e1de" Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.624018 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nsqw9" event={"ID":"f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5","Type":"ContainerDied","Data":"f6e41ebdf9622ff35c16cea6620bab90c0637d83e6b7d8e9a062db5feab715c7"} Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.645412 4747 scope.go:117] "RemoveContainer" containerID="0cb90376bb751d8aecfadb205a2aa32cb57903e494fcb179821192cf746fa3be" Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.666233 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nsqw9"] Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.671061 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nsqw9"] Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.681473 4747 scope.go:117] "RemoveContainer" containerID="6d763de3641aa1a195cfa594f73724b24a25aa945695fdeb46d3ed225c0b2ff7" Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.693626 4747 scope.go:117] "RemoveContainer" containerID="ee6a10e74f08ed387a11306cc59459f5aa1d8c615eee8cf5ec0ab8ee6227e1de" Oct 01 06:20:08 crc kubenswrapper[4747]: E1001 06:20:08.694128 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee6a10e74f08ed387a11306cc59459f5aa1d8c615eee8cf5ec0ab8ee6227e1de\": container with ID starting with ee6a10e74f08ed387a11306cc59459f5aa1d8c615eee8cf5ec0ab8ee6227e1de not found: ID does not exist" containerID="ee6a10e74f08ed387a11306cc59459f5aa1d8c615eee8cf5ec0ab8ee6227e1de" Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.694167 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee6a10e74f08ed387a11306cc59459f5aa1d8c615eee8cf5ec0ab8ee6227e1de"} err="failed to get container status \"ee6a10e74f08ed387a11306cc59459f5aa1d8c615eee8cf5ec0ab8ee6227e1de\": rpc error: code = NotFound desc = could not find container \"ee6a10e74f08ed387a11306cc59459f5aa1d8c615eee8cf5ec0ab8ee6227e1de\": container with ID starting with ee6a10e74f08ed387a11306cc59459f5aa1d8c615eee8cf5ec0ab8ee6227e1de not found: ID does not exist" Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.694192 4747 scope.go:117] "RemoveContainer" containerID="0cb90376bb751d8aecfadb205a2aa32cb57903e494fcb179821192cf746fa3be" Oct 01 06:20:08 crc kubenswrapper[4747]: E1001 06:20:08.694606 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cb90376bb751d8aecfadb205a2aa32cb57903e494fcb179821192cf746fa3be\": container with ID starting with 0cb90376bb751d8aecfadb205a2aa32cb57903e494fcb179821192cf746fa3be not found: ID does not exist" containerID="0cb90376bb751d8aecfadb205a2aa32cb57903e494fcb179821192cf746fa3be" Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.694626 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cb90376bb751d8aecfadb205a2aa32cb57903e494fcb179821192cf746fa3be"} err="failed to get container status \"0cb90376bb751d8aecfadb205a2aa32cb57903e494fcb179821192cf746fa3be\": rpc error: code = NotFound desc = could not find 
container \"0cb90376bb751d8aecfadb205a2aa32cb57903e494fcb179821192cf746fa3be\": container with ID starting with 0cb90376bb751d8aecfadb205a2aa32cb57903e494fcb179821192cf746fa3be not found: ID does not exist" Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.694639 4747 scope.go:117] "RemoveContainer" containerID="6d763de3641aa1a195cfa594f73724b24a25aa945695fdeb46d3ed225c0b2ff7" Oct 01 06:20:08 crc kubenswrapper[4747]: E1001 06:20:08.694892 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d763de3641aa1a195cfa594f73724b24a25aa945695fdeb46d3ed225c0b2ff7\": container with ID starting with 6d763de3641aa1a195cfa594f73724b24a25aa945695fdeb46d3ed225c0b2ff7 not found: ID does not exist" containerID="6d763de3641aa1a195cfa594f73724b24a25aa945695fdeb46d3ed225c0b2ff7" Oct 01 06:20:08 crc kubenswrapper[4747]: I1001 06:20:08.694925 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d763de3641aa1a195cfa594f73724b24a25aa945695fdeb46d3ed225c0b2ff7"} err="failed to get container status \"6d763de3641aa1a195cfa594f73724b24a25aa945695fdeb46d3ed225c0b2ff7\": rpc error: code = NotFound desc = could not find container \"6d763de3641aa1a195cfa594f73724b24a25aa945695fdeb46d3ed225c0b2ff7\": container with ID starting with 6d763de3641aa1a195cfa594f73724b24a25aa945695fdeb46d3ed225c0b2ff7 not found: ID does not exist" Oct 01 06:20:09 crc kubenswrapper[4747]: I1001 06:20:09.285248 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5" path="/var/lib/kubelet/pods/f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5/volumes" Oct 01 06:20:10 crc kubenswrapper[4747]: I1001 06:20:10.451144 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-q2zvk"] Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.477315 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" podUID="b35796dc-d78b-48ec-be7b-45d072b20fe8" containerName="oauth-openshift" containerID="cri-o://fa24d3dc2abc747aeef354d36b2df016adceb90dfbc1148cb62f5560f013dfa8" gracePeriod=15 Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.812946 4747 generic.go:334] "Generic (PLEG): container finished" podID="b35796dc-d78b-48ec-be7b-45d072b20fe8" containerID="fa24d3dc2abc747aeef354d36b2df016adceb90dfbc1148cb62f5560f013dfa8" exitCode=0 Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.813214 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" event={"ID":"b35796dc-d78b-48ec-be7b-45d072b20fe8","Type":"ContainerDied","Data":"fa24d3dc2abc747aeef354d36b2df016adceb90dfbc1148cb62f5560f013dfa8"} Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.913638 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942154 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-57866998d-sbnth"] Oct 01 06:20:35 crc kubenswrapper[4747]: E1001 06:20:35.942419 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ecbf263-d2a7-4f34-8623-7806e8cacbae" containerName="registry-server" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942432 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ecbf263-d2a7-4f34-8623-7806e8cacbae" containerName="registry-server" Oct 01 06:20:35 crc kubenswrapper[4747]: E1001 06:20:35.942444 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5" containerName="extract-utilities" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942451 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5" containerName="extract-utilities" Oct 01 06:20:35 crc kubenswrapper[4747]: E1001 06:20:35.942462 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c" containerName="registry-server" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942470 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c" containerName="registry-server" Oct 01 06:20:35 crc kubenswrapper[4747]: E1001 06:20:35.942482 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ecbf263-d2a7-4f34-8623-7806e8cacbae" containerName="extract-utilities" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942490 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ecbf263-d2a7-4f34-8623-7806e8cacbae" containerName="extract-utilities" Oct 01 06:20:35 crc kubenswrapper[4747]: E1001 06:20:35.942500 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5efb7422-a464-4daf-991f-808ba693495c" containerName="collect-profiles" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942507 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="5efb7422-a464-4daf-991f-808ba693495c" containerName="collect-profiles" Oct 01 06:20:35 crc kubenswrapper[4747]: E1001 06:20:35.942516 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c" containerName="extract-content" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942523 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c" containerName="extract-content" Oct 01 06:20:35 crc kubenswrapper[4747]: E1001 06:20:35.942533 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5" containerName="registry-server" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942540 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5" containerName="registry-server" Oct 01 06:20:35 crc kubenswrapper[4747]: E1001 06:20:35.942552 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b59cfffc-7d07-436d-bd34-6c7e3aa0da3c" containerName="extract-utilities" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942560 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b59cfffc-7d07-436d-bd34-6c7e3aa0da3c" containerName="extract-utilities" Oct 01 06:20:35 crc kubenswrapper[4747]: E1001 06:20:35.942570 4747 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="b35796dc-d78b-48ec-be7b-45d072b20fe8" containerName="oauth-openshift" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942577 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b35796dc-d78b-48ec-be7b-45d072b20fe8" containerName="oauth-openshift" Oct 01 06:20:35 crc kubenswrapper[4747]: E1001 06:20:35.942589 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8b7e6b7-8b4b-463d-af7f-2d05529d0cce" containerName="pruner" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942595 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8b7e6b7-8b4b-463d-af7f-2d05529d0cce" containerName="pruner" Oct 01 06:20:35 crc kubenswrapper[4747]: E1001 06:20:35.942606 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b59cfffc-7d07-436d-bd34-6c7e3aa0da3c" containerName="registry-server" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942615 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b59cfffc-7d07-436d-bd34-6c7e3aa0da3c" containerName="registry-server" Oct 01 06:20:35 crc kubenswrapper[4747]: E1001 06:20:35.942624 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ecbf263-d2a7-4f34-8623-7806e8cacbae" containerName="extract-content" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942631 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ecbf263-d2a7-4f34-8623-7806e8cacbae" containerName="extract-content" Oct 01 06:20:35 crc kubenswrapper[4747]: E1001 06:20:35.942643 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="894747bb-0bd0-4382-a444-1702bfcf44e3" containerName="pruner" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942649 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="894747bb-0bd0-4382-a444-1702bfcf44e3" containerName="pruner" Oct 01 06:20:35 crc kubenswrapper[4747]: E1001 06:20:35.942658 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c" containerName="extract-utilities" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942665 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c" containerName="extract-utilities" Oct 01 06:20:35 crc kubenswrapper[4747]: E1001 06:20:35.942675 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b59cfffc-7d07-436d-bd34-6c7e3aa0da3c" containerName="extract-content" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942683 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b59cfffc-7d07-436d-bd34-6c7e3aa0da3c" containerName="extract-content" Oct 01 06:20:35 crc kubenswrapper[4747]: E1001 06:20:35.942694 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5" containerName="extract-content" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942703 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5" containerName="extract-content" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942870 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4c61ec4-94c9-4b61-b7a1-b83b906b6ed5" containerName="registry-server" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942888 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8b7e6b7-8b4b-463d-af7f-2d05529d0cce" containerName="pruner" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942898 4747 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="894747bb-0bd0-4382-a444-1702bfcf44e3" containerName="pruner" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942908 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ecbf263-d2a7-4f34-8623-7806e8cacbae" containerName="registry-server" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942917 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b35796dc-d78b-48ec-be7b-45d072b20fe8" containerName="oauth-openshift" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942925 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="5efb7422-a464-4daf-991f-808ba693495c" containerName="collect-profiles" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942935 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef9573f8-3a36-469b-b82a-1f7cb6bf7b8c" containerName="registry-server" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.942944 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b59cfffc-7d07-436d-bd34-6c7e3aa0da3c" containerName="registry-server" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.943471 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:35 crc kubenswrapper[4747]: I1001 06:20:35.962297 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-57866998d-sbnth"] Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.033556 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-audit-policies\") pod \"b35796dc-d78b-48ec-be7b-45d072b20fe8\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.033595 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-service-ca\") pod \"b35796dc-d78b-48ec-be7b-45d072b20fe8\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.033618 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-ocp-branding-template\") pod \"b35796dc-d78b-48ec-be7b-45d072b20fe8\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.033654 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-template-provider-selection\") pod \"b35796dc-d78b-48ec-be7b-45d072b20fe8\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.033677 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-template-login\") pod \"b35796dc-d78b-48ec-be7b-45d072b20fe8\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.033698 4747 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-8r2th\" (UniqueName: \"kubernetes.io/projected/b35796dc-d78b-48ec-be7b-45d072b20fe8-kube-api-access-8r2th\") pod \"b35796dc-d78b-48ec-be7b-45d072b20fe8\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.033720 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-idp-0-file-data\") pod \"b35796dc-d78b-48ec-be7b-45d072b20fe8\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.033765 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b35796dc-d78b-48ec-be7b-45d072b20fe8-audit-dir\") pod \"b35796dc-d78b-48ec-be7b-45d072b20fe8\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.033789 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b35796dc-d78b-48ec-be7b-45d072b20fe8-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "b35796dc-d78b-48ec-be7b-45d072b20fe8" (UID: "b35796dc-d78b-48ec-be7b-45d072b20fe8"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.034105 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-template-error\") pod \"b35796dc-d78b-48ec-be7b-45d072b20fe8\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.034194 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-trusted-ca-bundle\") pod \"b35796dc-d78b-48ec-be7b-45d072b20fe8\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.034234 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-router-certs\") pod \"b35796dc-d78b-48ec-be7b-45d072b20fe8\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.034274 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-serving-cert\") pod \"b35796dc-d78b-48ec-be7b-45d072b20fe8\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.034312 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-cliconfig\") pod \"b35796dc-d78b-48ec-be7b-45d072b20fe8\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.034354 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" 
(UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-session\") pod \"b35796dc-d78b-48ec-be7b-45d072b20fe8\" (UID: \"b35796dc-d78b-48ec-be7b-45d072b20fe8\") " Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.034467 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.034522 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-user-template-error\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.034595 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.034671 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c34ccf20-8238-4812-84a1-4dc56b2be1c8-audit-dir\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.034707 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5m57w\" (UniqueName: \"kubernetes.io/projected/c34ccf20-8238-4812-84a1-4dc56b2be1c8-kube-api-access-5m57w\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.034775 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-system-session\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.034808 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.034921 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-system-router-certs\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.034958 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c34ccf20-8238-4812-84a1-4dc56b2be1c8-audit-policies\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.034983 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.035004 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.035022 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-system-service-ca\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.035049 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.035073 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-user-template-login\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.035125 4747 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b35796dc-d78b-48ec-be7b-45d072b20fe8-audit-dir\") on node \"crc\" DevicePath \"\"" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.035348 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-service-ca" 
(OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "b35796dc-d78b-48ec-be7b-45d072b20fe8" (UID: "b35796dc-d78b-48ec-be7b-45d072b20fe8"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.035577 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "b35796dc-d78b-48ec-be7b-45d072b20fe8" (UID: "b35796dc-d78b-48ec-be7b-45d072b20fe8"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.035891 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "b35796dc-d78b-48ec-be7b-45d072b20fe8" (UID: "b35796dc-d78b-48ec-be7b-45d072b20fe8"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.035940 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "b35796dc-d78b-48ec-be7b-45d072b20fe8" (UID: "b35796dc-d78b-48ec-be7b-45d072b20fe8"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.039724 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "b35796dc-d78b-48ec-be7b-45d072b20fe8" (UID: "b35796dc-d78b-48ec-be7b-45d072b20fe8"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.040169 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "b35796dc-d78b-48ec-be7b-45d072b20fe8" (UID: "b35796dc-d78b-48ec-be7b-45d072b20fe8"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.040585 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "b35796dc-d78b-48ec-be7b-45d072b20fe8" (UID: "b35796dc-d78b-48ec-be7b-45d072b20fe8"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.040911 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "b35796dc-d78b-48ec-be7b-45d072b20fe8" (UID: "b35796dc-d78b-48ec-be7b-45d072b20fe8"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.041061 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b35796dc-d78b-48ec-be7b-45d072b20fe8-kube-api-access-8r2th" (OuterVolumeSpecName: "kube-api-access-8r2th") pod "b35796dc-d78b-48ec-be7b-45d072b20fe8" (UID: "b35796dc-d78b-48ec-be7b-45d072b20fe8"). InnerVolumeSpecName "kube-api-access-8r2th". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.041471 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "b35796dc-d78b-48ec-be7b-45d072b20fe8" (UID: "b35796dc-d78b-48ec-be7b-45d072b20fe8"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.041872 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "b35796dc-d78b-48ec-be7b-45d072b20fe8" (UID: "b35796dc-d78b-48ec-be7b-45d072b20fe8"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.042173 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "b35796dc-d78b-48ec-be7b-45d072b20fe8" (UID: "b35796dc-d78b-48ec-be7b-45d072b20fe8"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.047165 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "b35796dc-d78b-48ec-be7b-45d072b20fe8" (UID: "b35796dc-d78b-48ec-be7b-45d072b20fe8"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.136588 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.136672 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-user-template-login\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.136809 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.136860 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-user-template-error\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.136896 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.136969 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c34ccf20-8238-4812-84a1-4dc56b2be1c8-audit-dir\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.137019 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5m57w\" (UniqueName: \"kubernetes.io/projected/c34ccf20-8238-4812-84a1-4dc56b2be1c8-kube-api-access-5m57w\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.137070 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-system-session\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " 
pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.137121 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.137221 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-system-router-certs\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.137276 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c34ccf20-8238-4812-84a1-4dc56b2be1c8-audit-policies\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.137320 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.137375 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.137416 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-system-service-ca\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.137689 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.137722 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.138836 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-cliconfig\") on 
node \"crc\" DevicePath \"\"" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.138901 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.138934 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.138960 4747 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.138988 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.138837 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c34ccf20-8238-4812-84a1-4dc56b2be1c8-audit-dir\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.139320 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.139725 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c34ccf20-8238-4812-84a1-4dc56b2be1c8-audit-policies\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.139846 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.139875 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8r2th\" (UniqueName: \"kubernetes.io/projected/b35796dc-d78b-48ec-be7b-45d072b20fe8-kube-api-access-8r2th\") on node \"crc\" DevicePath \"\"" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.139897 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.139918 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 
06:20:36.139937 4747 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b35796dc-d78b-48ec-be7b-45d072b20fe8-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.141345 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.141580 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-system-service-ca\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.142450 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.142691 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-user-template-error\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.145842 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.146165 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.146330 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.146470 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-system-session\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.147066 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.147394 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-system-router-certs\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.147570 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c34ccf20-8238-4812-84a1-4dc56b2be1c8-v4-0-config-user-template-login\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.160149 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5m57w\" (UniqueName: \"kubernetes.io/projected/c34ccf20-8238-4812-84a1-4dc56b2be1c8-kube-api-access-5m57w\") pod \"oauth-openshift-57866998d-sbnth\" (UID: \"c34ccf20-8238-4812-84a1-4dc56b2be1c8\") " pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.266608 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.763582 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-57866998d-sbnth"] Oct 01 06:20:36 crc kubenswrapper[4747]: W1001 06:20:36.769862 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc34ccf20_8238_4812_84a1_4dc56b2be1c8.slice/crio-9d0ece5715c154a100044c0ba66f7180261d6fdadd1d1796d283de6777dadfa3 WatchSource:0}: Error finding container 9d0ece5715c154a100044c0ba66f7180261d6fdadd1d1796d283de6777dadfa3: Status 404 returned error can't find the container with id 9d0ece5715c154a100044c0ba66f7180261d6fdadd1d1796d283de6777dadfa3 Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.825899 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-57866998d-sbnth" event={"ID":"c34ccf20-8238-4812-84a1-4dc56b2be1c8","Type":"ContainerStarted","Data":"9d0ece5715c154a100044c0ba66f7180261d6fdadd1d1796d283de6777dadfa3"} Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.828439 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" event={"ID":"b35796dc-d78b-48ec-be7b-45d072b20fe8","Type":"ContainerDied","Data":"287ad9362953ff9d7d51778d8cd3bf30ad8abff9ef9f095da210950d57ccd96f"} Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.828493 4747 scope.go:117] "RemoveContainer" containerID="fa24d3dc2abc747aeef354d36b2df016adceb90dfbc1148cb62f5560f013dfa8" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.828639 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-q2zvk" Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.881377 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-q2zvk"] Oct 01 06:20:36 crc kubenswrapper[4747]: I1001 06:20:36.884797 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-q2zvk"] Oct 01 06:20:37 crc kubenswrapper[4747]: I1001 06:20:37.289363 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b35796dc-d78b-48ec-be7b-45d072b20fe8" path="/var/lib/kubelet/pods/b35796dc-d78b-48ec-be7b-45d072b20fe8/volumes" Oct 01 06:20:37 crc kubenswrapper[4747]: I1001 06:20:37.837375 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-57866998d-sbnth" event={"ID":"c34ccf20-8238-4812-84a1-4dc56b2be1c8","Type":"ContainerStarted","Data":"5bc475c9028c4560688e64917f0454ad7e06778964938ae221d72a8e02107d0b"} Oct 01 06:20:37 crc kubenswrapper[4747]: I1001 06:20:37.837644 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:37 crc kubenswrapper[4747]: I1001 06:20:37.846799 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-57866998d-sbnth" Oct 01 06:20:37 crc kubenswrapper[4747]: I1001 06:20:37.883924 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-57866998d-sbnth" podStartSLOduration=27.883893131 podStartE2EDuration="27.883893131s" podCreationTimestamp="2025-10-01 06:20:10 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:20:37.877900252 +0000 UTC m=+239.287557411" watchObservedRunningTime="2025-10-01 06:20:37.883893131 +0000 UTC m=+239.293550250" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.150887 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-899r6"] Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.152619 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-899r6" podUID="fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e" containerName="registry-server" containerID="cri-o://d877cad5c312d1ef40c14cfb2c09e6d4141c01689adad7c6fdf7a9e0819d75bb" gracePeriod=30 Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.173482 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-w8ltr"] Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.175475 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-w8ltr" podUID="8e6ee5ef-246c-41dd-a003-d590e1e1119b" containerName="registry-server" containerID="cri-o://9e07d29983b3227d7573f15355d315b26fea92fd5e9f9c56823e94cb257efa71" gracePeriod=30 Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.194882 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-284zz"] Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.195234 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-284zz" podUID="9d504980-0efe-4f16-b3ec-a94e4c0e0384" containerName="marketplace-operator" containerID="cri-o://fa3cb1ff8388b1f6b946d714bfa9408fe90e9277860d133a1f050bd99c25ad2d" gracePeriod=30 Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.219696 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-r2t26"] Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.220120 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-r2t26" podUID="98569525-9a56-4701-9a06-46ac13cb40c3" containerName="registry-server" containerID="cri-o://e739e6f4df38024e25ed84ed75033f91085560c41ac188df1d6bef091d5f1361" gracePeriod=30 Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.227225 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2gdmv"] Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.227605 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2gdmv" podUID="055f5415-eb0a-47cd-9f51-9fe5751b890a" containerName="registry-server" containerID="cri-o://04b86719f4ca04daa15f0b3b2d1ab4c2384258132f9b7397e83e4da7e4cd8dbe" gracePeriod=30 Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.245410 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mcc6b"] Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.247029 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mcc6b" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.251575 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mcc6b"] Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.364592 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f7a3b74b-4658-470c-bea5-0a7431da169e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mcc6b\" (UID: \"f7a3b74b-4658-470c-bea5-0a7431da169e\") " pod="openshift-marketplace/marketplace-operator-79b997595-mcc6b" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.364862 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmzcw\" (UniqueName: \"kubernetes.io/projected/f7a3b74b-4658-470c-bea5-0a7431da169e-kube-api-access-cmzcw\") pod \"marketplace-operator-79b997595-mcc6b\" (UID: \"f7a3b74b-4658-470c-bea5-0a7431da169e\") " pod="openshift-marketplace/marketplace-operator-79b997595-mcc6b" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.364921 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f7a3b74b-4658-470c-bea5-0a7431da169e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mcc6b\" (UID: \"f7a3b74b-4658-470c-bea5-0a7431da169e\") " pod="openshift-marketplace/marketplace-operator-79b997595-mcc6b" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.465918 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f7a3b74b-4658-470c-bea5-0a7431da169e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mcc6b\" (UID: \"f7a3b74b-4658-470c-bea5-0a7431da169e\") " pod="openshift-marketplace/marketplace-operator-79b997595-mcc6b" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.465979 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmzcw\" (UniqueName: \"kubernetes.io/projected/f7a3b74b-4658-470c-bea5-0a7431da169e-kube-api-access-cmzcw\") pod \"marketplace-operator-79b997595-mcc6b\" (UID: \"f7a3b74b-4658-470c-bea5-0a7431da169e\") " pod="openshift-marketplace/marketplace-operator-79b997595-mcc6b" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.466043 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f7a3b74b-4658-470c-bea5-0a7431da169e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mcc6b\" (UID: \"f7a3b74b-4658-470c-bea5-0a7431da169e\") " pod="openshift-marketplace/marketplace-operator-79b997595-mcc6b" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.467472 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f7a3b74b-4658-470c-bea5-0a7431da169e-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-mcc6b\" (UID: \"f7a3b74b-4658-470c-bea5-0a7431da169e\") " pod="openshift-marketplace/marketplace-operator-79b997595-mcc6b" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.472209 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/f7a3b74b-4658-470c-bea5-0a7431da169e-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-mcc6b\" (UID: \"f7a3b74b-4658-470c-bea5-0a7431da169e\") " pod="openshift-marketplace/marketplace-operator-79b997595-mcc6b" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.483585 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmzcw\" (UniqueName: \"kubernetes.io/projected/f7a3b74b-4658-470c-bea5-0a7431da169e-kube-api-access-cmzcw\") pod \"marketplace-operator-79b997595-mcc6b\" (UID: \"f7a3b74b-4658-470c-bea5-0a7431da169e\") " pod="openshift-marketplace/marketplace-operator-79b997595-mcc6b" Oct 01 06:21:14 crc kubenswrapper[4747]: E1001 06:21:14.504275 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d877cad5c312d1ef40c14cfb2c09e6d4141c01689adad7c6fdf7a9e0819d75bb is running failed: container process not found" containerID="d877cad5c312d1ef40c14cfb2c09e6d4141c01689adad7c6fdf7a9e0819d75bb" cmd=["grpc_health_probe","-addr=:50051"] Oct 01 06:21:14 crc kubenswrapper[4747]: E1001 06:21:14.505176 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d877cad5c312d1ef40c14cfb2c09e6d4141c01689adad7c6fdf7a9e0819d75bb is running failed: container process not found" containerID="d877cad5c312d1ef40c14cfb2c09e6d4141c01689adad7c6fdf7a9e0819d75bb" cmd=["grpc_health_probe","-addr=:50051"] Oct 01 06:21:14 crc kubenswrapper[4747]: E1001 06:21:14.506897 4747 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d877cad5c312d1ef40c14cfb2c09e6d4141c01689adad7c6fdf7a9e0819d75bb is running failed: container process not found" containerID="d877cad5c312d1ef40c14cfb2c09e6d4141c01689adad7c6fdf7a9e0819d75bb" cmd=["grpc_health_probe","-addr=:50051"] Oct 01 06:21:14 crc kubenswrapper[4747]: E1001 06:21:14.506972 4747 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d877cad5c312d1ef40c14cfb2c09e6d4141c01689adad7c6fdf7a9e0819d75bb is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-899r6" podUID="fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e" containerName="registry-server" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.560861 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-mcc6b" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.599618 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w8ltr" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.611381 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-899r6" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.659820 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-284zz" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.666916 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2gdmv" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.711591 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r2t26" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.769604 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5nhk\" (UniqueName: \"kubernetes.io/projected/fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e-kube-api-access-k5nhk\") pod \"fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e\" (UID: \"fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e\") " Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.769667 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e6ee5ef-246c-41dd-a003-d590e1e1119b-utilities\") pod \"8e6ee5ef-246c-41dd-a003-d590e1e1119b\" (UID: \"8e6ee5ef-246c-41dd-a003-d590e1e1119b\") " Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.769704 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d504980-0efe-4f16-b3ec-a94e4c0e0384-marketplace-trusted-ca\") pod \"9d504980-0efe-4f16-b3ec-a94e4c0e0384\" (UID: \"9d504980-0efe-4f16-b3ec-a94e4c0e0384\") " Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.769734 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cwr88\" (UniqueName: \"kubernetes.io/projected/9d504980-0efe-4f16-b3ec-a94e4c0e0384-kube-api-access-cwr88\") pod \"9d504980-0efe-4f16-b3ec-a94e4c0e0384\" (UID: \"9d504980-0efe-4f16-b3ec-a94e4c0e0384\") " Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.769772 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vnr8d\" (UniqueName: \"kubernetes.io/projected/055f5415-eb0a-47cd-9f51-9fe5751b890a-kube-api-access-vnr8d\") pod \"055f5415-eb0a-47cd-9f51-9fe5751b890a\" (UID: \"055f5415-eb0a-47cd-9f51-9fe5751b890a\") " Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.769796 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9d504980-0efe-4f16-b3ec-a94e4c0e0384-marketplace-operator-metrics\") pod \"9d504980-0efe-4f16-b3ec-a94e4c0e0384\" (UID: \"9d504980-0efe-4f16-b3ec-a94e4c0e0384\") " Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.769815 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e-utilities\") pod \"fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e\" (UID: \"fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e\") " Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.769851 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e6ee5ef-246c-41dd-a003-d590e1e1119b-catalog-content\") pod \"8e6ee5ef-246c-41dd-a003-d590e1e1119b\" (UID: \"8e6ee5ef-246c-41dd-a003-d590e1e1119b\") " Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.769893 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e-catalog-content\") pod \"fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e\" (UID: \"fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e\") " Oct 01 06:21:14 crc 
kubenswrapper[4747]: I1001 06:21:14.769912 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/055f5415-eb0a-47cd-9f51-9fe5751b890a-catalog-content\") pod \"055f5415-eb0a-47cd-9f51-9fe5751b890a\" (UID: \"055f5415-eb0a-47cd-9f51-9fe5751b890a\") " Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.769935 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/055f5415-eb0a-47cd-9f51-9fe5751b890a-utilities\") pod \"055f5415-eb0a-47cd-9f51-9fe5751b890a\" (UID: \"055f5415-eb0a-47cd-9f51-9fe5751b890a\") " Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.769951 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xc4cs\" (UniqueName: \"kubernetes.io/projected/8e6ee5ef-246c-41dd-a003-d590e1e1119b-kube-api-access-xc4cs\") pod \"8e6ee5ef-246c-41dd-a003-d590e1e1119b\" (UID: \"8e6ee5ef-246c-41dd-a003-d590e1e1119b\") " Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.770711 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e6ee5ef-246c-41dd-a003-d590e1e1119b-utilities" (OuterVolumeSpecName: "utilities") pod "8e6ee5ef-246c-41dd-a003-d590e1e1119b" (UID: "8e6ee5ef-246c-41dd-a003-d590e1e1119b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.771271 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/055f5415-eb0a-47cd-9f51-9fe5751b890a-utilities" (OuterVolumeSpecName: "utilities") pod "055f5415-eb0a-47cd-9f51-9fe5751b890a" (UID: "055f5415-eb0a-47cd-9f51-9fe5751b890a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.772579 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d504980-0efe-4f16-b3ec-a94e4c0e0384-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "9d504980-0efe-4f16-b3ec-a94e4c0e0384" (UID: "9d504980-0efe-4f16-b3ec-a94e4c0e0384"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.773422 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e-utilities" (OuterVolumeSpecName: "utilities") pod "fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e" (UID: "fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.775862 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d504980-0efe-4f16-b3ec-a94e4c0e0384-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "9d504980-0efe-4f16-b3ec-a94e4c0e0384" (UID: "9d504980-0efe-4f16-b3ec-a94e4c0e0384"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.776890 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e6ee5ef-246c-41dd-a003-d590e1e1119b-kube-api-access-xc4cs" (OuterVolumeSpecName: "kube-api-access-xc4cs") pod "8e6ee5ef-246c-41dd-a003-d590e1e1119b" (UID: "8e6ee5ef-246c-41dd-a003-d590e1e1119b"). InnerVolumeSpecName "kube-api-access-xc4cs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.776931 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d504980-0efe-4f16-b3ec-a94e4c0e0384-kube-api-access-cwr88" (OuterVolumeSpecName: "kube-api-access-cwr88") pod "9d504980-0efe-4f16-b3ec-a94e4c0e0384" (UID: "9d504980-0efe-4f16-b3ec-a94e4c0e0384"). InnerVolumeSpecName "kube-api-access-cwr88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.780929 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e-kube-api-access-k5nhk" (OuterVolumeSpecName: "kube-api-access-k5nhk") pod "fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e" (UID: "fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e"). InnerVolumeSpecName "kube-api-access-k5nhk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.781585 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/055f5415-eb0a-47cd-9f51-9fe5751b890a-kube-api-access-vnr8d" (OuterVolumeSpecName: "kube-api-access-vnr8d") pod "055f5415-eb0a-47cd-9f51-9fe5751b890a" (UID: "055f5415-eb0a-47cd-9f51-9fe5751b890a"). InnerVolumeSpecName "kube-api-access-vnr8d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.821482 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e" (UID: "fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.830427 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e6ee5ef-246c-41dd-a003-d590e1e1119b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8e6ee5ef-246c-41dd-a003-d590e1e1119b" (UID: "8e6ee5ef-246c-41dd-a003-d590e1e1119b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.849993 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/055f5415-eb0a-47cd-9f51-9fe5751b890a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "055f5415-eb0a-47cd-9f51-9fe5751b890a" (UID: "055f5415-eb0a-47cd-9f51-9fe5751b890a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.870781 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98569525-9a56-4701-9a06-46ac13cb40c3-catalog-content\") pod \"98569525-9a56-4701-9a06-46ac13cb40c3\" (UID: \"98569525-9a56-4701-9a06-46ac13cb40c3\") " Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.870861 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98569525-9a56-4701-9a06-46ac13cb40c3-utilities\") pod \"98569525-9a56-4701-9a06-46ac13cb40c3\" (UID: \"98569525-9a56-4701-9a06-46ac13cb40c3\") " Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.870891 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwtzg\" (UniqueName: \"kubernetes.io/projected/98569525-9a56-4701-9a06-46ac13cb40c3-kube-api-access-rwtzg\") pod \"98569525-9a56-4701-9a06-46ac13cb40c3\" (UID: \"98569525-9a56-4701-9a06-46ac13cb40c3\") " Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.871136 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e6ee5ef-246c-41dd-a003-d590e1e1119b-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.871160 4747 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d504980-0efe-4f16-b3ec-a94e4c0e0384-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.871174 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cwr88\" (UniqueName: \"kubernetes.io/projected/9d504980-0efe-4f16-b3ec-a94e4c0e0384-kube-api-access-cwr88\") on node \"crc\" DevicePath \"\"" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.871186 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vnr8d\" (UniqueName: \"kubernetes.io/projected/055f5415-eb0a-47cd-9f51-9fe5751b890a-kube-api-access-vnr8d\") on node \"crc\" DevicePath \"\"" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.871200 4747 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9d504980-0efe-4f16-b3ec-a94e4c0e0384-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.871212 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.871225 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e6ee5ef-246c-41dd-a003-d590e1e1119b-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.871236 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.871246 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/055f5415-eb0a-47cd-9f51-9fe5751b890a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.871257 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/055f5415-eb0a-47cd-9f51-9fe5751b890a-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.871269 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xc4cs\" (UniqueName: \"kubernetes.io/projected/8e6ee5ef-246c-41dd-a003-d590e1e1119b-kube-api-access-xc4cs\") on node \"crc\" DevicePath \"\"" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.871281 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5nhk\" (UniqueName: \"kubernetes.io/projected/fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e-kube-api-access-k5nhk\") on node \"crc\" DevicePath \"\"" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.873832 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98569525-9a56-4701-9a06-46ac13cb40c3-kube-api-access-rwtzg" (OuterVolumeSpecName: "kube-api-access-rwtzg") pod "98569525-9a56-4701-9a06-46ac13cb40c3" (UID: "98569525-9a56-4701-9a06-46ac13cb40c3"). InnerVolumeSpecName "kube-api-access-rwtzg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.875484 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98569525-9a56-4701-9a06-46ac13cb40c3-utilities" (OuterVolumeSpecName: "utilities") pod "98569525-9a56-4701-9a06-46ac13cb40c3" (UID: "98569525-9a56-4701-9a06-46ac13cb40c3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.884493 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98569525-9a56-4701-9a06-46ac13cb40c3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "98569525-9a56-4701-9a06-46ac13cb40c3" (UID: "98569525-9a56-4701-9a06-46ac13cb40c3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.972131 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/98569525-9a56-4701-9a06-46ac13cb40c3-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.972469 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/98569525-9a56-4701-9a06-46ac13cb40c3-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:21:14 crc kubenswrapper[4747]: I1001 06:21:14.972561 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwtzg\" (UniqueName: \"kubernetes.io/projected/98569525-9a56-4701-9a06-46ac13cb40c3-kube-api-access-rwtzg\") on node \"crc\" DevicePath \"\"" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.022295 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-mcc6b"] Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.088717 4747 generic.go:334] "Generic (PLEG): container finished" podID="98569525-9a56-4701-9a06-46ac13cb40c3" containerID="e739e6f4df38024e25ed84ed75033f91085560c41ac188df1d6bef091d5f1361" exitCode=0 Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.088813 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r2t26" event={"ID":"98569525-9a56-4701-9a06-46ac13cb40c3","Type":"ContainerDied","Data":"e739e6f4df38024e25ed84ed75033f91085560c41ac188df1d6bef091d5f1361"} Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.088847 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r2t26" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.088875 4747 scope.go:117] "RemoveContainer" containerID="e739e6f4df38024e25ed84ed75033f91085560c41ac188df1d6bef091d5f1361" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.088861 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r2t26" event={"ID":"98569525-9a56-4701-9a06-46ac13cb40c3","Type":"ContainerDied","Data":"88b22cf187add4323ec199f4f1d0809e7765a5aee9cb657fadc32f47841f487b"} Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.094329 4747 generic.go:334] "Generic (PLEG): container finished" podID="8e6ee5ef-246c-41dd-a003-d590e1e1119b" containerID="9e07d29983b3227d7573f15355d315b26fea92fd5e9f9c56823e94cb257efa71" exitCode=0 Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.094416 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w8ltr" event={"ID":"8e6ee5ef-246c-41dd-a003-d590e1e1119b","Type":"ContainerDied","Data":"9e07d29983b3227d7573f15355d315b26fea92fd5e9f9c56823e94cb257efa71"} Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.094451 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w8ltr" event={"ID":"8e6ee5ef-246c-41dd-a003-d590e1e1119b","Type":"ContainerDied","Data":"30cbd2d3177c60567157e16a090a15f34c788455e9aa4c5f014308764c1b5712"} Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.095881 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-w8ltr" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.097360 4747 generic.go:334] "Generic (PLEG): container finished" podID="9d504980-0efe-4f16-b3ec-a94e4c0e0384" containerID="fa3cb1ff8388b1f6b946d714bfa9408fe90e9277860d133a1f050bd99c25ad2d" exitCode=0 Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.097434 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-284zz" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.097448 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-284zz" event={"ID":"9d504980-0efe-4f16-b3ec-a94e4c0e0384","Type":"ContainerDied","Data":"fa3cb1ff8388b1f6b946d714bfa9408fe90e9277860d133a1f050bd99c25ad2d"} Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.097484 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-284zz" event={"ID":"9d504980-0efe-4f16-b3ec-a94e4c0e0384","Type":"ContainerDied","Data":"05671cf6609bc4d2069365435a17aad42019f2eeffd56986af585ff0ad11bc27"} Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.105741 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mcc6b" event={"ID":"f7a3b74b-4658-470c-bea5-0a7431da169e","Type":"ContainerStarted","Data":"afbf10e5e1ce632a58cd14bc318ae7e0bf7dc85009700a9ec2ca2e8aad08c3b6"} Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.117638 4747 generic.go:334] "Generic (PLEG): container finished" podID="055f5415-eb0a-47cd-9f51-9fe5751b890a" containerID="04b86719f4ca04daa15f0b3b2d1ab4c2384258132f9b7397e83e4da7e4cd8dbe" exitCode=0 Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.117740 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2gdmv" event={"ID":"055f5415-eb0a-47cd-9f51-9fe5751b890a","Type":"ContainerDied","Data":"04b86719f4ca04daa15f0b3b2d1ab4c2384258132f9b7397e83e4da7e4cd8dbe"} Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.117746 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2gdmv" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.117818 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2gdmv" event={"ID":"055f5415-eb0a-47cd-9f51-9fe5751b890a","Type":"ContainerDied","Data":"f3784c009faa18dd3d788788a11e96d3b84c91a11b90899365a7851879f163bc"} Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.118228 4747 scope.go:117] "RemoveContainer" containerID="9be7941308b8b3428d223b395a9b1758f9ecd7d8f4ea9a5446bee5a6e585eb82" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.122830 4747 generic.go:334] "Generic (PLEG): container finished" podID="fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e" containerID="d877cad5c312d1ef40c14cfb2c09e6d4141c01689adad7c6fdf7a9e0819d75bb" exitCode=0 Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.122881 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-899r6" event={"ID":"fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e","Type":"ContainerDied","Data":"d877cad5c312d1ef40c14cfb2c09e6d4141c01689adad7c6fdf7a9e0819d75bb"} Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.122914 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-899r6" event={"ID":"fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e","Type":"ContainerDied","Data":"91d81e33c1c025150400cebe8fb08ec3f33b78035d6cde4ca9503e0a6d922330"} Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.122922 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-899r6" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.137658 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-r2t26"] Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.151508 4747 scope.go:117] "RemoveContainer" containerID="ac45c3b5d9c1b1cbf88cdff20bfc3e6f31430987e2327a5095db89f4516b133c" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.152337 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-r2t26"] Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.166418 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-284zz"] Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.174363 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-284zz"] Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.178058 4747 scope.go:117] "RemoveContainer" containerID="e739e6f4df38024e25ed84ed75033f91085560c41ac188df1d6bef091d5f1361" Oct 01 06:21:15 crc kubenswrapper[4747]: E1001 06:21:15.181936 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e739e6f4df38024e25ed84ed75033f91085560c41ac188df1d6bef091d5f1361\": container with ID starting with e739e6f4df38024e25ed84ed75033f91085560c41ac188df1d6bef091d5f1361 not found: ID does not exist" containerID="e739e6f4df38024e25ed84ed75033f91085560c41ac188df1d6bef091d5f1361" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.181981 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e739e6f4df38024e25ed84ed75033f91085560c41ac188df1d6bef091d5f1361"} err="failed to get container status \"e739e6f4df38024e25ed84ed75033f91085560c41ac188df1d6bef091d5f1361\": rpc error: 
code = NotFound desc = could not find container \"e739e6f4df38024e25ed84ed75033f91085560c41ac188df1d6bef091d5f1361\": container with ID starting with e739e6f4df38024e25ed84ed75033f91085560c41ac188df1d6bef091d5f1361 not found: ID does not exist" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.182006 4747 scope.go:117] "RemoveContainer" containerID="9be7941308b8b3428d223b395a9b1758f9ecd7d8f4ea9a5446bee5a6e585eb82" Oct 01 06:21:15 crc kubenswrapper[4747]: E1001 06:21:15.184271 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9be7941308b8b3428d223b395a9b1758f9ecd7d8f4ea9a5446bee5a6e585eb82\": container with ID starting with 9be7941308b8b3428d223b395a9b1758f9ecd7d8f4ea9a5446bee5a6e585eb82 not found: ID does not exist" containerID="9be7941308b8b3428d223b395a9b1758f9ecd7d8f4ea9a5446bee5a6e585eb82" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.184336 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9be7941308b8b3428d223b395a9b1758f9ecd7d8f4ea9a5446bee5a6e585eb82"} err="failed to get container status \"9be7941308b8b3428d223b395a9b1758f9ecd7d8f4ea9a5446bee5a6e585eb82\": rpc error: code = NotFound desc = could not find container \"9be7941308b8b3428d223b395a9b1758f9ecd7d8f4ea9a5446bee5a6e585eb82\": container with ID starting with 9be7941308b8b3428d223b395a9b1758f9ecd7d8f4ea9a5446bee5a6e585eb82 not found: ID does not exist" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.184417 4747 scope.go:117] "RemoveContainer" containerID="ac45c3b5d9c1b1cbf88cdff20bfc3e6f31430987e2327a5095db89f4516b133c" Oct 01 06:21:15 crc kubenswrapper[4747]: E1001 06:21:15.184826 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac45c3b5d9c1b1cbf88cdff20bfc3e6f31430987e2327a5095db89f4516b133c\": container with ID starting with ac45c3b5d9c1b1cbf88cdff20bfc3e6f31430987e2327a5095db89f4516b133c not found: ID does not exist" containerID="ac45c3b5d9c1b1cbf88cdff20bfc3e6f31430987e2327a5095db89f4516b133c" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.184846 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac45c3b5d9c1b1cbf88cdff20bfc3e6f31430987e2327a5095db89f4516b133c"} err="failed to get container status \"ac45c3b5d9c1b1cbf88cdff20bfc3e6f31430987e2327a5095db89f4516b133c\": rpc error: code = NotFound desc = could not find container \"ac45c3b5d9c1b1cbf88cdff20bfc3e6f31430987e2327a5095db89f4516b133c\": container with ID starting with ac45c3b5d9c1b1cbf88cdff20bfc3e6f31430987e2327a5095db89f4516b133c not found: ID does not exist" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.184859 4747 scope.go:117] "RemoveContainer" containerID="9e07d29983b3227d7573f15355d315b26fea92fd5e9f9c56823e94cb257efa71" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.188562 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-899r6"] Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.189812 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-899r6"] Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.192149 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-w8ltr"] Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.198096 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-marketplace/community-operators-w8ltr"] Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.203186 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2gdmv"] Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.203606 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2gdmv"] Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.206021 4747 scope.go:117] "RemoveContainer" containerID="c135cc97cab9212db9e9f0e07d6df3679abef8c296638a6725a5812043e4a364" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.223916 4747 scope.go:117] "RemoveContainer" containerID="6124cf08ab14d5baface208480bcda409c6fa0b3c02539e8a377864d4324b65b" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.238569 4747 scope.go:117] "RemoveContainer" containerID="9e07d29983b3227d7573f15355d315b26fea92fd5e9f9c56823e94cb257efa71" Oct 01 06:21:15 crc kubenswrapper[4747]: E1001 06:21:15.239067 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e07d29983b3227d7573f15355d315b26fea92fd5e9f9c56823e94cb257efa71\": container with ID starting with 9e07d29983b3227d7573f15355d315b26fea92fd5e9f9c56823e94cb257efa71 not found: ID does not exist" containerID="9e07d29983b3227d7573f15355d315b26fea92fd5e9f9c56823e94cb257efa71" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.239125 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e07d29983b3227d7573f15355d315b26fea92fd5e9f9c56823e94cb257efa71"} err="failed to get container status \"9e07d29983b3227d7573f15355d315b26fea92fd5e9f9c56823e94cb257efa71\": rpc error: code = NotFound desc = could not find container \"9e07d29983b3227d7573f15355d315b26fea92fd5e9f9c56823e94cb257efa71\": container with ID starting with 9e07d29983b3227d7573f15355d315b26fea92fd5e9f9c56823e94cb257efa71 not found: ID does not exist" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.239159 4747 scope.go:117] "RemoveContainer" containerID="c135cc97cab9212db9e9f0e07d6df3679abef8c296638a6725a5812043e4a364" Oct 01 06:21:15 crc kubenswrapper[4747]: E1001 06:21:15.239603 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c135cc97cab9212db9e9f0e07d6df3679abef8c296638a6725a5812043e4a364\": container with ID starting with c135cc97cab9212db9e9f0e07d6df3679abef8c296638a6725a5812043e4a364 not found: ID does not exist" containerID="c135cc97cab9212db9e9f0e07d6df3679abef8c296638a6725a5812043e4a364" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.239646 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c135cc97cab9212db9e9f0e07d6df3679abef8c296638a6725a5812043e4a364"} err="failed to get container status \"c135cc97cab9212db9e9f0e07d6df3679abef8c296638a6725a5812043e4a364\": rpc error: code = NotFound desc = could not find container \"c135cc97cab9212db9e9f0e07d6df3679abef8c296638a6725a5812043e4a364\": container with ID starting with c135cc97cab9212db9e9f0e07d6df3679abef8c296638a6725a5812043e4a364 not found: ID does not exist" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.239677 4747 scope.go:117] "RemoveContainer" containerID="6124cf08ab14d5baface208480bcda409c6fa0b3c02539e8a377864d4324b65b" Oct 01 06:21:15 crc kubenswrapper[4747]: E1001 06:21:15.240049 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code 
= NotFound desc = could not find container \"6124cf08ab14d5baface208480bcda409c6fa0b3c02539e8a377864d4324b65b\": container with ID starting with 6124cf08ab14d5baface208480bcda409c6fa0b3c02539e8a377864d4324b65b not found: ID does not exist" containerID="6124cf08ab14d5baface208480bcda409c6fa0b3c02539e8a377864d4324b65b" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.240078 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6124cf08ab14d5baface208480bcda409c6fa0b3c02539e8a377864d4324b65b"} err="failed to get container status \"6124cf08ab14d5baface208480bcda409c6fa0b3c02539e8a377864d4324b65b\": rpc error: code = NotFound desc = could not find container \"6124cf08ab14d5baface208480bcda409c6fa0b3c02539e8a377864d4324b65b\": container with ID starting with 6124cf08ab14d5baface208480bcda409c6fa0b3c02539e8a377864d4324b65b not found: ID does not exist" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.240096 4747 scope.go:117] "RemoveContainer" containerID="fa3cb1ff8388b1f6b946d714bfa9408fe90e9277860d133a1f050bd99c25ad2d" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.253063 4747 scope.go:117] "RemoveContainer" containerID="fa3cb1ff8388b1f6b946d714bfa9408fe90e9277860d133a1f050bd99c25ad2d" Oct 01 06:21:15 crc kubenswrapper[4747]: E1001 06:21:15.253410 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa3cb1ff8388b1f6b946d714bfa9408fe90e9277860d133a1f050bd99c25ad2d\": container with ID starting with fa3cb1ff8388b1f6b946d714bfa9408fe90e9277860d133a1f050bd99c25ad2d not found: ID does not exist" containerID="fa3cb1ff8388b1f6b946d714bfa9408fe90e9277860d133a1f050bd99c25ad2d" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.253446 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa3cb1ff8388b1f6b946d714bfa9408fe90e9277860d133a1f050bd99c25ad2d"} err="failed to get container status \"fa3cb1ff8388b1f6b946d714bfa9408fe90e9277860d133a1f050bd99c25ad2d\": rpc error: code = NotFound desc = could not find container \"fa3cb1ff8388b1f6b946d714bfa9408fe90e9277860d133a1f050bd99c25ad2d\": container with ID starting with fa3cb1ff8388b1f6b946d714bfa9408fe90e9277860d133a1f050bd99c25ad2d not found: ID does not exist" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.253465 4747 scope.go:117] "RemoveContainer" containerID="04b86719f4ca04daa15f0b3b2d1ab4c2384258132f9b7397e83e4da7e4cd8dbe" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.284154 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="055f5415-eb0a-47cd-9f51-9fe5751b890a" path="/var/lib/kubelet/pods/055f5415-eb0a-47cd-9f51-9fe5751b890a/volumes" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.285104 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e6ee5ef-246c-41dd-a003-d590e1e1119b" path="/var/lib/kubelet/pods/8e6ee5ef-246c-41dd-a003-d590e1e1119b/volumes" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.285891 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98569525-9a56-4701-9a06-46ac13cb40c3" path="/var/lib/kubelet/pods/98569525-9a56-4701-9a06-46ac13cb40c3/volumes" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.287222 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d504980-0efe-4f16-b3ec-a94e4c0e0384" path="/var/lib/kubelet/pods/9d504980-0efe-4f16-b3ec-a94e4c0e0384/volumes" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 
06:21:15.287837 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e" path="/var/lib/kubelet/pods/fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e/volumes" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.295004 4747 scope.go:117] "RemoveContainer" containerID="8969211a4c3e5621983645aa5152f280b1addca74ba0dbd0850ee7e96f12e122" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.311036 4747 scope.go:117] "RemoveContainer" containerID="ebaa7ff65a058845b093a14ffeb584ba84c31cd1fc962aeebb564433aae1a7b3" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.340504 4747 scope.go:117] "RemoveContainer" containerID="04b86719f4ca04daa15f0b3b2d1ab4c2384258132f9b7397e83e4da7e4cd8dbe" Oct 01 06:21:15 crc kubenswrapper[4747]: E1001 06:21:15.340982 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04b86719f4ca04daa15f0b3b2d1ab4c2384258132f9b7397e83e4da7e4cd8dbe\": container with ID starting with 04b86719f4ca04daa15f0b3b2d1ab4c2384258132f9b7397e83e4da7e4cd8dbe not found: ID does not exist" containerID="04b86719f4ca04daa15f0b3b2d1ab4c2384258132f9b7397e83e4da7e4cd8dbe" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.341043 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04b86719f4ca04daa15f0b3b2d1ab4c2384258132f9b7397e83e4da7e4cd8dbe"} err="failed to get container status \"04b86719f4ca04daa15f0b3b2d1ab4c2384258132f9b7397e83e4da7e4cd8dbe\": rpc error: code = NotFound desc = could not find container \"04b86719f4ca04daa15f0b3b2d1ab4c2384258132f9b7397e83e4da7e4cd8dbe\": container with ID starting with 04b86719f4ca04daa15f0b3b2d1ab4c2384258132f9b7397e83e4da7e4cd8dbe not found: ID does not exist" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.341077 4747 scope.go:117] "RemoveContainer" containerID="8969211a4c3e5621983645aa5152f280b1addca74ba0dbd0850ee7e96f12e122" Oct 01 06:21:15 crc kubenswrapper[4747]: E1001 06:21:15.341410 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8969211a4c3e5621983645aa5152f280b1addca74ba0dbd0850ee7e96f12e122\": container with ID starting with 8969211a4c3e5621983645aa5152f280b1addca74ba0dbd0850ee7e96f12e122 not found: ID does not exist" containerID="8969211a4c3e5621983645aa5152f280b1addca74ba0dbd0850ee7e96f12e122" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.341451 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8969211a4c3e5621983645aa5152f280b1addca74ba0dbd0850ee7e96f12e122"} err="failed to get container status \"8969211a4c3e5621983645aa5152f280b1addca74ba0dbd0850ee7e96f12e122\": rpc error: code = NotFound desc = could not find container \"8969211a4c3e5621983645aa5152f280b1addca74ba0dbd0850ee7e96f12e122\": container with ID starting with 8969211a4c3e5621983645aa5152f280b1addca74ba0dbd0850ee7e96f12e122 not found: ID does not exist" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.341477 4747 scope.go:117] "RemoveContainer" containerID="ebaa7ff65a058845b093a14ffeb584ba84c31cd1fc962aeebb564433aae1a7b3" Oct 01 06:21:15 crc kubenswrapper[4747]: E1001 06:21:15.341847 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ebaa7ff65a058845b093a14ffeb584ba84c31cd1fc962aeebb564433aae1a7b3\": container with ID starting with 
ebaa7ff65a058845b093a14ffeb584ba84c31cd1fc962aeebb564433aae1a7b3 not found: ID does not exist" containerID="ebaa7ff65a058845b093a14ffeb584ba84c31cd1fc962aeebb564433aae1a7b3" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.341882 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ebaa7ff65a058845b093a14ffeb584ba84c31cd1fc962aeebb564433aae1a7b3"} err="failed to get container status \"ebaa7ff65a058845b093a14ffeb584ba84c31cd1fc962aeebb564433aae1a7b3\": rpc error: code = NotFound desc = could not find container \"ebaa7ff65a058845b093a14ffeb584ba84c31cd1fc962aeebb564433aae1a7b3\": container with ID starting with ebaa7ff65a058845b093a14ffeb584ba84c31cd1fc962aeebb564433aae1a7b3 not found: ID does not exist" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.341905 4747 scope.go:117] "RemoveContainer" containerID="d877cad5c312d1ef40c14cfb2c09e6d4141c01689adad7c6fdf7a9e0819d75bb" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.360868 4747 scope.go:117] "RemoveContainer" containerID="0f15ca5fcb5f9ab06bdf5d24dff84adf36a6b98070ffae4a9cd345341c9fd333" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.373976 4747 scope.go:117] "RemoveContainer" containerID="75761a6be47d7bc35f0b14e74baa447883510852390af697c44494eb15b1d871" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.388340 4747 scope.go:117] "RemoveContainer" containerID="d877cad5c312d1ef40c14cfb2c09e6d4141c01689adad7c6fdf7a9e0819d75bb" Oct 01 06:21:15 crc kubenswrapper[4747]: E1001 06:21:15.388830 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d877cad5c312d1ef40c14cfb2c09e6d4141c01689adad7c6fdf7a9e0819d75bb\": container with ID starting with d877cad5c312d1ef40c14cfb2c09e6d4141c01689adad7c6fdf7a9e0819d75bb not found: ID does not exist" containerID="d877cad5c312d1ef40c14cfb2c09e6d4141c01689adad7c6fdf7a9e0819d75bb" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.388867 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d877cad5c312d1ef40c14cfb2c09e6d4141c01689adad7c6fdf7a9e0819d75bb"} err="failed to get container status \"d877cad5c312d1ef40c14cfb2c09e6d4141c01689adad7c6fdf7a9e0819d75bb\": rpc error: code = NotFound desc = could not find container \"d877cad5c312d1ef40c14cfb2c09e6d4141c01689adad7c6fdf7a9e0819d75bb\": container with ID starting with d877cad5c312d1ef40c14cfb2c09e6d4141c01689adad7c6fdf7a9e0819d75bb not found: ID does not exist" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.388897 4747 scope.go:117] "RemoveContainer" containerID="0f15ca5fcb5f9ab06bdf5d24dff84adf36a6b98070ffae4a9cd345341c9fd333" Oct 01 06:21:15 crc kubenswrapper[4747]: E1001 06:21:15.389260 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f15ca5fcb5f9ab06bdf5d24dff84adf36a6b98070ffae4a9cd345341c9fd333\": container with ID starting with 0f15ca5fcb5f9ab06bdf5d24dff84adf36a6b98070ffae4a9cd345341c9fd333 not found: ID does not exist" containerID="0f15ca5fcb5f9ab06bdf5d24dff84adf36a6b98070ffae4a9cd345341c9fd333" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.389291 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f15ca5fcb5f9ab06bdf5d24dff84adf36a6b98070ffae4a9cd345341c9fd333"} err="failed to get container status \"0f15ca5fcb5f9ab06bdf5d24dff84adf36a6b98070ffae4a9cd345341c9fd333\": rpc error: code = NotFound desc 
= could not find container \"0f15ca5fcb5f9ab06bdf5d24dff84adf36a6b98070ffae4a9cd345341c9fd333\": container with ID starting with 0f15ca5fcb5f9ab06bdf5d24dff84adf36a6b98070ffae4a9cd345341c9fd333 not found: ID does not exist" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.389311 4747 scope.go:117] "RemoveContainer" containerID="75761a6be47d7bc35f0b14e74baa447883510852390af697c44494eb15b1d871" Oct 01 06:21:15 crc kubenswrapper[4747]: E1001 06:21:15.389655 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75761a6be47d7bc35f0b14e74baa447883510852390af697c44494eb15b1d871\": container with ID starting with 75761a6be47d7bc35f0b14e74baa447883510852390af697c44494eb15b1d871 not found: ID does not exist" containerID="75761a6be47d7bc35f0b14e74baa447883510852390af697c44494eb15b1d871" Oct 01 06:21:15 crc kubenswrapper[4747]: I1001 06:21:15.389680 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75761a6be47d7bc35f0b14e74baa447883510852390af697c44494eb15b1d871"} err="failed to get container status \"75761a6be47d7bc35f0b14e74baa447883510852390af697c44494eb15b1d871\": rpc error: code = NotFound desc = could not find container \"75761a6be47d7bc35f0b14e74baa447883510852390af697c44494eb15b1d871\": container with ID starting with 75761a6be47d7bc35f0b14e74baa447883510852390af697c44494eb15b1d871 not found: ID does not exist" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.131447 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-mcc6b" event={"ID":"f7a3b74b-4658-470c-bea5-0a7431da169e","Type":"ContainerStarted","Data":"7c28b38ca08e1570519064a6ed277d3e28ecba2fcbaebf93ec2887bf29ff208a"} Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.132013 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-mcc6b" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.137644 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-mcc6b" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.154618 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-mcc6b" podStartSLOduration=2.154587476 podStartE2EDuration="2.154587476s" podCreationTimestamp="2025-10-01 06:21:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:21:16.152689838 +0000 UTC m=+277.562346917" watchObservedRunningTime="2025-10-01 06:21:16.154587476 +0000 UTC m=+277.564244535" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.363597 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zh5bf"] Oct 01 06:21:16 crc kubenswrapper[4747]: E1001 06:21:16.364424 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e6ee5ef-246c-41dd-a003-d590e1e1119b" containerName="extract-content" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.364443 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e6ee5ef-246c-41dd-a003-d590e1e1119b" containerName="extract-content" Oct 01 06:21:16 crc kubenswrapper[4747]: E1001 06:21:16.364456 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d504980-0efe-4f16-b3ec-a94e4c0e0384" 
containerName="marketplace-operator" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.364464 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d504980-0efe-4f16-b3ec-a94e4c0e0384" containerName="marketplace-operator" Oct 01 06:21:16 crc kubenswrapper[4747]: E1001 06:21:16.364480 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e" containerName="registry-server" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.364489 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e" containerName="registry-server" Oct 01 06:21:16 crc kubenswrapper[4747]: E1001 06:21:16.364502 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98569525-9a56-4701-9a06-46ac13cb40c3" containerName="registry-server" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.364510 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="98569525-9a56-4701-9a06-46ac13cb40c3" containerName="registry-server" Oct 01 06:21:16 crc kubenswrapper[4747]: E1001 06:21:16.364523 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="055f5415-eb0a-47cd-9f51-9fe5751b890a" containerName="registry-server" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.364532 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="055f5415-eb0a-47cd-9f51-9fe5751b890a" containerName="registry-server" Oct 01 06:21:16 crc kubenswrapper[4747]: E1001 06:21:16.364548 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e6ee5ef-246c-41dd-a003-d590e1e1119b" containerName="extract-utilities" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.364557 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e6ee5ef-246c-41dd-a003-d590e1e1119b" containerName="extract-utilities" Oct 01 06:21:16 crc kubenswrapper[4747]: E1001 06:21:16.364568 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98569525-9a56-4701-9a06-46ac13cb40c3" containerName="extract-utilities" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.364576 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="98569525-9a56-4701-9a06-46ac13cb40c3" containerName="extract-utilities" Oct 01 06:21:16 crc kubenswrapper[4747]: E1001 06:21:16.364591 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="055f5415-eb0a-47cd-9f51-9fe5751b890a" containerName="extract-utilities" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.364600 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="055f5415-eb0a-47cd-9f51-9fe5751b890a" containerName="extract-utilities" Oct 01 06:21:16 crc kubenswrapper[4747]: E1001 06:21:16.364612 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="055f5415-eb0a-47cd-9f51-9fe5751b890a" containerName="extract-content" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.364619 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="055f5415-eb0a-47cd-9f51-9fe5751b890a" containerName="extract-content" Oct 01 06:21:16 crc kubenswrapper[4747]: E1001 06:21:16.364628 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e" containerName="extract-content" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.364638 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e" containerName="extract-content" Oct 01 06:21:16 crc kubenswrapper[4747]: E1001 06:21:16.364654 4747 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="8e6ee5ef-246c-41dd-a003-d590e1e1119b" containerName="registry-server" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.364663 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e6ee5ef-246c-41dd-a003-d590e1e1119b" containerName="registry-server" Oct 01 06:21:16 crc kubenswrapper[4747]: E1001 06:21:16.364674 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e" containerName="extract-utilities" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.364682 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e" containerName="extract-utilities" Oct 01 06:21:16 crc kubenswrapper[4747]: E1001 06:21:16.364696 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98569525-9a56-4701-9a06-46ac13cb40c3" containerName="extract-content" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.364704 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="98569525-9a56-4701-9a06-46ac13cb40c3" containerName="extract-content" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.364856 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdcf3931-48f0-49aa-b7b8-bbfbb3aefc6e" containerName="registry-server" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.364874 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d504980-0efe-4f16-b3ec-a94e4c0e0384" containerName="marketplace-operator" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.364884 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e6ee5ef-246c-41dd-a003-d590e1e1119b" containerName="registry-server" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.364895 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="055f5415-eb0a-47cd-9f51-9fe5751b890a" containerName="registry-server" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.364911 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="98569525-9a56-4701-9a06-46ac13cb40c3" containerName="registry-server" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.366180 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zh5bf" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.371047 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.375590 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zh5bf"] Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.519772 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b657e3a5-1d77-412c-999e-1f386d9724bf-catalog-content\") pod \"redhat-marketplace-zh5bf\" (UID: \"b657e3a5-1d77-412c-999e-1f386d9724bf\") " pod="openshift-marketplace/redhat-marketplace-zh5bf" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.519831 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b657e3a5-1d77-412c-999e-1f386d9724bf-utilities\") pod \"redhat-marketplace-zh5bf\" (UID: \"b657e3a5-1d77-412c-999e-1f386d9724bf\") " pod="openshift-marketplace/redhat-marketplace-zh5bf" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.519859 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrkxs\" (UniqueName: \"kubernetes.io/projected/b657e3a5-1d77-412c-999e-1f386d9724bf-kube-api-access-vrkxs\") pod \"redhat-marketplace-zh5bf\" (UID: \"b657e3a5-1d77-412c-999e-1f386d9724bf\") " pod="openshift-marketplace/redhat-marketplace-zh5bf" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.561594 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4bfp9"] Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.563464 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4bfp9" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.565645 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.591078 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4bfp9"] Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.622006 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b657e3a5-1d77-412c-999e-1f386d9724bf-utilities\") pod \"redhat-marketplace-zh5bf\" (UID: \"b657e3a5-1d77-412c-999e-1f386d9724bf\") " pod="openshift-marketplace/redhat-marketplace-zh5bf" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.622071 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrkxs\" (UniqueName: \"kubernetes.io/projected/b657e3a5-1d77-412c-999e-1f386d9724bf-kube-api-access-vrkxs\") pod \"redhat-marketplace-zh5bf\" (UID: \"b657e3a5-1d77-412c-999e-1f386d9724bf\") " pod="openshift-marketplace/redhat-marketplace-zh5bf" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.622184 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b657e3a5-1d77-412c-999e-1f386d9724bf-catalog-content\") pod \"redhat-marketplace-zh5bf\" (UID: \"b657e3a5-1d77-412c-999e-1f386d9724bf\") " pod="openshift-marketplace/redhat-marketplace-zh5bf" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.622719 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b657e3a5-1d77-412c-999e-1f386d9724bf-utilities\") pod \"redhat-marketplace-zh5bf\" (UID: \"b657e3a5-1d77-412c-999e-1f386d9724bf\") " pod="openshift-marketplace/redhat-marketplace-zh5bf" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.622818 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b657e3a5-1d77-412c-999e-1f386d9724bf-catalog-content\") pod \"redhat-marketplace-zh5bf\" (UID: \"b657e3a5-1d77-412c-999e-1f386d9724bf\") " pod="openshift-marketplace/redhat-marketplace-zh5bf" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.664926 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrkxs\" (UniqueName: \"kubernetes.io/projected/b657e3a5-1d77-412c-999e-1f386d9724bf-kube-api-access-vrkxs\") pod \"redhat-marketplace-zh5bf\" (UID: \"b657e3a5-1d77-412c-999e-1f386d9724bf\") " pod="openshift-marketplace/redhat-marketplace-zh5bf" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.723269 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18fb0a3d-c326-4741-a6af-4897f1740900-catalog-content\") pod \"certified-operators-4bfp9\" (UID: \"18fb0a3d-c326-4741-a6af-4897f1740900\") " pod="openshift-marketplace/certified-operators-4bfp9" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.723343 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18fb0a3d-c326-4741-a6af-4897f1740900-utilities\") pod \"certified-operators-4bfp9\" (UID: \"18fb0a3d-c326-4741-a6af-4897f1740900\") 
" pod="openshift-marketplace/certified-operators-4bfp9" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.723363 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vz2dd\" (UniqueName: \"kubernetes.io/projected/18fb0a3d-c326-4741-a6af-4897f1740900-kube-api-access-vz2dd\") pod \"certified-operators-4bfp9\" (UID: \"18fb0a3d-c326-4741-a6af-4897f1740900\") " pod="openshift-marketplace/certified-operators-4bfp9" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.746285 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zh5bf" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.827467 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18fb0a3d-c326-4741-a6af-4897f1740900-utilities\") pod \"certified-operators-4bfp9\" (UID: \"18fb0a3d-c326-4741-a6af-4897f1740900\") " pod="openshift-marketplace/certified-operators-4bfp9" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.827524 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vz2dd\" (UniqueName: \"kubernetes.io/projected/18fb0a3d-c326-4741-a6af-4897f1740900-kube-api-access-vz2dd\") pod \"certified-operators-4bfp9\" (UID: \"18fb0a3d-c326-4741-a6af-4897f1740900\") " pod="openshift-marketplace/certified-operators-4bfp9" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.827614 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18fb0a3d-c326-4741-a6af-4897f1740900-catalog-content\") pod \"certified-operators-4bfp9\" (UID: \"18fb0a3d-c326-4741-a6af-4897f1740900\") " pod="openshift-marketplace/certified-operators-4bfp9" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.828449 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18fb0a3d-c326-4741-a6af-4897f1740900-catalog-content\") pod \"certified-operators-4bfp9\" (UID: \"18fb0a3d-c326-4741-a6af-4897f1740900\") " pod="openshift-marketplace/certified-operators-4bfp9" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.828961 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18fb0a3d-c326-4741-a6af-4897f1740900-utilities\") pod \"certified-operators-4bfp9\" (UID: \"18fb0a3d-c326-4741-a6af-4897f1740900\") " pod="openshift-marketplace/certified-operators-4bfp9" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.844380 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vz2dd\" (UniqueName: \"kubernetes.io/projected/18fb0a3d-c326-4741-a6af-4897f1740900-kube-api-access-vz2dd\") pod \"certified-operators-4bfp9\" (UID: \"18fb0a3d-c326-4741-a6af-4897f1740900\") " pod="openshift-marketplace/certified-operators-4bfp9" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.889080 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4bfp9" Oct 01 06:21:16 crc kubenswrapper[4747]: I1001 06:21:16.969900 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zh5bf"] Oct 01 06:21:17 crc kubenswrapper[4747]: I1001 06:21:17.095503 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4bfp9"] Oct 01 06:21:17 crc kubenswrapper[4747]: W1001 06:21:17.101229 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod18fb0a3d_c326_4741_a6af_4897f1740900.slice/crio-043ea93535b4c3c63970e75881f5cce226ad8558c4e2abc86b9438a9af41fcd4 WatchSource:0}: Error finding container 043ea93535b4c3c63970e75881f5cce226ad8558c4e2abc86b9438a9af41fcd4: Status 404 returned error can't find the container with id 043ea93535b4c3c63970e75881f5cce226ad8558c4e2abc86b9438a9af41fcd4 Oct 01 06:21:17 crc kubenswrapper[4747]: I1001 06:21:17.142256 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zh5bf" event={"ID":"b657e3a5-1d77-412c-999e-1f386d9724bf","Type":"ContainerStarted","Data":"30dd381a139655065653410f60e8774410ba26eade208b867f73c3b59652f07f"} Oct 01 06:21:17 crc kubenswrapper[4747]: I1001 06:21:17.142295 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zh5bf" event={"ID":"b657e3a5-1d77-412c-999e-1f386d9724bf","Type":"ContainerStarted","Data":"d84dd18678b3c278e80df9364abfcf115313b5956d2d0ffaa6272e39ad58bbef"} Oct 01 06:21:17 crc kubenswrapper[4747]: I1001 06:21:17.145956 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bfp9" event={"ID":"18fb0a3d-c326-4741-a6af-4897f1740900","Type":"ContainerStarted","Data":"043ea93535b4c3c63970e75881f5cce226ad8558c4e2abc86b9438a9af41fcd4"} Oct 01 06:21:18 crc kubenswrapper[4747]: I1001 06:21:18.153376 4747 generic.go:334] "Generic (PLEG): container finished" podID="b657e3a5-1d77-412c-999e-1f386d9724bf" containerID="30dd381a139655065653410f60e8774410ba26eade208b867f73c3b59652f07f" exitCode=0 Oct 01 06:21:18 crc kubenswrapper[4747]: I1001 06:21:18.153455 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zh5bf" event={"ID":"b657e3a5-1d77-412c-999e-1f386d9724bf","Type":"ContainerDied","Data":"30dd381a139655065653410f60e8774410ba26eade208b867f73c3b59652f07f"} Oct 01 06:21:18 crc kubenswrapper[4747]: I1001 06:21:18.158452 4747 generic.go:334] "Generic (PLEG): container finished" podID="18fb0a3d-c326-4741-a6af-4897f1740900" containerID="6e06841cf8da91c08bdd186b48d36cf1711d79e2e31392a003f5d2fbda57b2dc" exitCode=0 Oct 01 06:21:18 crc kubenswrapper[4747]: I1001 06:21:18.158552 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bfp9" event={"ID":"18fb0a3d-c326-4741-a6af-4897f1740900","Type":"ContainerDied","Data":"6e06841cf8da91c08bdd186b48d36cf1711d79e2e31392a003f5d2fbda57b2dc"} Oct 01 06:21:18 crc kubenswrapper[4747]: I1001 06:21:18.777078 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-27npp"] Oct 01 06:21:18 crc kubenswrapper[4747]: I1001 06:21:18.778398 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-27npp" Oct 01 06:21:18 crc kubenswrapper[4747]: I1001 06:21:18.784346 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-27npp"] Oct 01 06:21:18 crc kubenswrapper[4747]: I1001 06:21:18.786056 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 01 06:21:18 crc kubenswrapper[4747]: I1001 06:21:18.962923 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chlqh\" (UniqueName: \"kubernetes.io/projected/2b622aa3-38b6-46f4-b2bc-91208b5ecf20-kube-api-access-chlqh\") pod \"redhat-operators-27npp\" (UID: \"2b622aa3-38b6-46f4-b2bc-91208b5ecf20\") " pod="openshift-marketplace/redhat-operators-27npp" Oct 01 06:21:18 crc kubenswrapper[4747]: I1001 06:21:18.966118 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b622aa3-38b6-46f4-b2bc-91208b5ecf20-utilities\") pod \"redhat-operators-27npp\" (UID: \"2b622aa3-38b6-46f4-b2bc-91208b5ecf20\") " pod="openshift-marketplace/redhat-operators-27npp" Oct 01 06:21:18 crc kubenswrapper[4747]: I1001 06:21:18.966151 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b622aa3-38b6-46f4-b2bc-91208b5ecf20-catalog-content\") pod \"redhat-operators-27npp\" (UID: \"2b622aa3-38b6-46f4-b2bc-91208b5ecf20\") " pod="openshift-marketplace/redhat-operators-27npp" Oct 01 06:21:18 crc kubenswrapper[4747]: I1001 06:21:18.968121 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9xksc"] Oct 01 06:21:18 crc kubenswrapper[4747]: I1001 06:21:18.969057 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9xksc" Oct 01 06:21:18 crc kubenswrapper[4747]: I1001 06:21:18.970706 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 01 06:21:18 crc kubenswrapper[4747]: I1001 06:21:18.977626 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9xksc"] Oct 01 06:21:19 crc kubenswrapper[4747]: I1001 06:21:19.068370 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e2e0a71-6f8f-49e7-b077-2dab23e591f2-utilities\") pod \"community-operators-9xksc\" (UID: \"6e2e0a71-6f8f-49e7-b077-2dab23e591f2\") " pod="openshift-marketplace/community-operators-9xksc" Oct 01 06:21:19 crc kubenswrapper[4747]: I1001 06:21:19.068419 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chlqh\" (UniqueName: \"kubernetes.io/projected/2b622aa3-38b6-46f4-b2bc-91208b5ecf20-kube-api-access-chlqh\") pod \"redhat-operators-27npp\" (UID: \"2b622aa3-38b6-46f4-b2bc-91208b5ecf20\") " pod="openshift-marketplace/redhat-operators-27npp" Oct 01 06:21:19 crc kubenswrapper[4747]: I1001 06:21:19.068448 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e2e0a71-6f8f-49e7-b077-2dab23e591f2-catalog-content\") pod \"community-operators-9xksc\" (UID: \"6e2e0a71-6f8f-49e7-b077-2dab23e591f2\") " pod="openshift-marketplace/community-operators-9xksc" Oct 01 06:21:19 crc kubenswrapper[4747]: I1001 06:21:19.068474 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b622aa3-38b6-46f4-b2bc-91208b5ecf20-utilities\") pod \"redhat-operators-27npp\" (UID: \"2b622aa3-38b6-46f4-b2bc-91208b5ecf20\") " pod="openshift-marketplace/redhat-operators-27npp" Oct 01 06:21:19 crc kubenswrapper[4747]: I1001 06:21:19.068498 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b622aa3-38b6-46f4-b2bc-91208b5ecf20-catalog-content\") pod \"redhat-operators-27npp\" (UID: \"2b622aa3-38b6-46f4-b2bc-91208b5ecf20\") " pod="openshift-marketplace/redhat-operators-27npp" Oct 01 06:21:19 crc kubenswrapper[4747]: I1001 06:21:19.068518 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xxgq\" (UniqueName: \"kubernetes.io/projected/6e2e0a71-6f8f-49e7-b077-2dab23e591f2-kube-api-access-6xxgq\") pod \"community-operators-9xksc\" (UID: \"6e2e0a71-6f8f-49e7-b077-2dab23e591f2\") " pod="openshift-marketplace/community-operators-9xksc" Oct 01 06:21:19 crc kubenswrapper[4747]: I1001 06:21:19.069428 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b622aa3-38b6-46f4-b2bc-91208b5ecf20-utilities\") pod \"redhat-operators-27npp\" (UID: \"2b622aa3-38b6-46f4-b2bc-91208b5ecf20\") " pod="openshift-marketplace/redhat-operators-27npp" Oct 01 06:21:19 crc kubenswrapper[4747]: I1001 06:21:19.069812 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b622aa3-38b6-46f4-b2bc-91208b5ecf20-catalog-content\") pod \"redhat-operators-27npp\" (UID: 
\"2b622aa3-38b6-46f4-b2bc-91208b5ecf20\") " pod="openshift-marketplace/redhat-operators-27npp" Oct 01 06:21:19 crc kubenswrapper[4747]: I1001 06:21:19.088579 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chlqh\" (UniqueName: \"kubernetes.io/projected/2b622aa3-38b6-46f4-b2bc-91208b5ecf20-kube-api-access-chlqh\") pod \"redhat-operators-27npp\" (UID: \"2b622aa3-38b6-46f4-b2bc-91208b5ecf20\") " pod="openshift-marketplace/redhat-operators-27npp" Oct 01 06:21:19 crc kubenswrapper[4747]: I1001 06:21:19.151636 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-27npp" Oct 01 06:21:19 crc kubenswrapper[4747]: I1001 06:21:19.164937 4747 generic.go:334] "Generic (PLEG): container finished" podID="18fb0a3d-c326-4741-a6af-4897f1740900" containerID="dcfdef41d019ccd0df774b259c256b5fe5ec46ef0d066de6d10b9168fec9c253" exitCode=0 Oct 01 06:21:19 crc kubenswrapper[4747]: I1001 06:21:19.164982 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bfp9" event={"ID":"18fb0a3d-c326-4741-a6af-4897f1740900","Type":"ContainerDied","Data":"dcfdef41d019ccd0df774b259c256b5fe5ec46ef0d066de6d10b9168fec9c253"} Oct 01 06:21:19 crc kubenswrapper[4747]: I1001 06:21:19.170372 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e2e0a71-6f8f-49e7-b077-2dab23e591f2-utilities\") pod \"community-operators-9xksc\" (UID: \"6e2e0a71-6f8f-49e7-b077-2dab23e591f2\") " pod="openshift-marketplace/community-operators-9xksc" Oct 01 06:21:19 crc kubenswrapper[4747]: I1001 06:21:19.170469 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e2e0a71-6f8f-49e7-b077-2dab23e591f2-catalog-content\") pod \"community-operators-9xksc\" (UID: \"6e2e0a71-6f8f-49e7-b077-2dab23e591f2\") " pod="openshift-marketplace/community-operators-9xksc" Oct 01 06:21:19 crc kubenswrapper[4747]: I1001 06:21:19.170523 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xxgq\" (UniqueName: \"kubernetes.io/projected/6e2e0a71-6f8f-49e7-b077-2dab23e591f2-kube-api-access-6xxgq\") pod \"community-operators-9xksc\" (UID: \"6e2e0a71-6f8f-49e7-b077-2dab23e591f2\") " pod="openshift-marketplace/community-operators-9xksc" Oct 01 06:21:19 crc kubenswrapper[4747]: I1001 06:21:19.172171 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6e2e0a71-6f8f-49e7-b077-2dab23e591f2-utilities\") pod \"community-operators-9xksc\" (UID: \"6e2e0a71-6f8f-49e7-b077-2dab23e591f2\") " pod="openshift-marketplace/community-operators-9xksc" Oct 01 06:21:19 crc kubenswrapper[4747]: I1001 06:21:19.172361 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6e2e0a71-6f8f-49e7-b077-2dab23e591f2-catalog-content\") pod \"community-operators-9xksc\" (UID: \"6e2e0a71-6f8f-49e7-b077-2dab23e591f2\") " pod="openshift-marketplace/community-operators-9xksc" Oct 01 06:21:19 crc kubenswrapper[4747]: I1001 06:21:19.195741 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xxgq\" (UniqueName: \"kubernetes.io/projected/6e2e0a71-6f8f-49e7-b077-2dab23e591f2-kube-api-access-6xxgq\") pod \"community-operators-9xksc\" (UID: 
\"6e2e0a71-6f8f-49e7-b077-2dab23e591f2\") " pod="openshift-marketplace/community-operators-9xksc" Oct 01 06:21:19 crc kubenswrapper[4747]: I1001 06:21:19.292359 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9xksc" Oct 01 06:21:19 crc kubenswrapper[4747]: I1001 06:21:19.362928 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-27npp"] Oct 01 06:21:19 crc kubenswrapper[4747]: I1001 06:21:19.471246 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9xksc"] Oct 01 06:21:19 crc kubenswrapper[4747]: W1001 06:21:19.476964 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6e2e0a71_6f8f_49e7_b077_2dab23e591f2.slice/crio-b7c2f579bec57fa21fac4f9a2e7edce9c3d19aad918915921d26e6fe644dd869 WatchSource:0}: Error finding container b7c2f579bec57fa21fac4f9a2e7edce9c3d19aad918915921d26e6fe644dd869: Status 404 returned error can't find the container with id b7c2f579bec57fa21fac4f9a2e7edce9c3d19aad918915921d26e6fe644dd869 Oct 01 06:21:20 crc kubenswrapper[4747]: I1001 06:21:20.176021 4747 generic.go:334] "Generic (PLEG): container finished" podID="b657e3a5-1d77-412c-999e-1f386d9724bf" containerID="e30cf7fbdd6acd2a54941590cc641a67daa1548e4797facdb63ebc3021c91f60" exitCode=0 Oct 01 06:21:20 crc kubenswrapper[4747]: I1001 06:21:20.176072 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zh5bf" event={"ID":"b657e3a5-1d77-412c-999e-1f386d9724bf","Type":"ContainerDied","Data":"e30cf7fbdd6acd2a54941590cc641a67daa1548e4797facdb63ebc3021c91f60"} Oct 01 06:21:20 crc kubenswrapper[4747]: I1001 06:21:20.179044 4747 generic.go:334] "Generic (PLEG): container finished" podID="2b622aa3-38b6-46f4-b2bc-91208b5ecf20" containerID="d01fdb4870259ef5fd6b025c85cc37277d7341107091abff3b72df02be47862a" exitCode=0 Oct 01 06:21:20 crc kubenswrapper[4747]: I1001 06:21:20.179154 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-27npp" event={"ID":"2b622aa3-38b6-46f4-b2bc-91208b5ecf20","Type":"ContainerDied","Data":"d01fdb4870259ef5fd6b025c85cc37277d7341107091abff3b72df02be47862a"} Oct 01 06:21:20 crc kubenswrapper[4747]: I1001 06:21:20.179205 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-27npp" event={"ID":"2b622aa3-38b6-46f4-b2bc-91208b5ecf20","Type":"ContainerStarted","Data":"8817f7f83545b8fab6e976b0adf0687067ee6786e4faa7121978538f6248d938"} Oct 01 06:21:20 crc kubenswrapper[4747]: I1001 06:21:20.183558 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4bfp9" event={"ID":"18fb0a3d-c326-4741-a6af-4897f1740900","Type":"ContainerStarted","Data":"5a5574831fa86f0b5aad8dcececc01872085e0b0bab4aaea0191bb582b5eefaf"} Oct 01 06:21:20 crc kubenswrapper[4747]: I1001 06:21:20.188529 4747 generic.go:334] "Generic (PLEG): container finished" podID="6e2e0a71-6f8f-49e7-b077-2dab23e591f2" containerID="c4634cf91a645b138a7478b1ad728339cc6a0534f0072825f496d54d1cab7bc4" exitCode=0 Oct 01 06:21:20 crc kubenswrapper[4747]: I1001 06:21:20.188598 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xksc" event={"ID":"6e2e0a71-6f8f-49e7-b077-2dab23e591f2","Type":"ContainerDied","Data":"c4634cf91a645b138a7478b1ad728339cc6a0534f0072825f496d54d1cab7bc4"} Oct 01 
06:21:20 crc kubenswrapper[4747]: I1001 06:21:20.188651 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xksc" event={"ID":"6e2e0a71-6f8f-49e7-b077-2dab23e591f2","Type":"ContainerStarted","Data":"b7c2f579bec57fa21fac4f9a2e7edce9c3d19aad918915921d26e6fe644dd869"} Oct 01 06:21:20 crc kubenswrapper[4747]: I1001 06:21:20.236670 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4bfp9" podStartSLOduration=2.813878898 podStartE2EDuration="4.236642218s" podCreationTimestamp="2025-10-01 06:21:16 +0000 UTC" firstStartedPulling="2025-10-01 06:21:18.16086388 +0000 UTC m=+279.570520969" lastFinishedPulling="2025-10-01 06:21:19.58362724 +0000 UTC m=+280.993284289" observedRunningTime="2025-10-01 06:21:20.230947895 +0000 UTC m=+281.640604954" watchObservedRunningTime="2025-10-01 06:21:20.236642218 +0000 UTC m=+281.646299307" Oct 01 06:21:21 crc kubenswrapper[4747]: I1001 06:21:21.202198 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zh5bf" event={"ID":"b657e3a5-1d77-412c-999e-1f386d9724bf","Type":"ContainerStarted","Data":"7b571c7f245d62086a20446bcfe77aba107d5b17cca9e06025dab7f0268aeb5c"} Oct 01 06:21:21 crc kubenswrapper[4747]: I1001 06:21:21.204168 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-27npp" event={"ID":"2b622aa3-38b6-46f4-b2bc-91208b5ecf20","Type":"ContainerStarted","Data":"f7b6c1f435a0ea2d9dc860a4101da42c89f6e32b3ce099241f03eeb0e6fbfec9"} Oct 01 06:21:21 crc kubenswrapper[4747]: I1001 06:21:21.206294 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xksc" event={"ID":"6e2e0a71-6f8f-49e7-b077-2dab23e591f2","Type":"ContainerStarted","Data":"aa7d26a52c2a3ff5de1b930d0f61dd0e26a66ff6b1eab0059361c6c372cfe4ae"} Oct 01 06:21:21 crc kubenswrapper[4747]: I1001 06:21:21.224060 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zh5bf" podStartSLOduration=2.734599267 podStartE2EDuration="5.224040523s" podCreationTimestamp="2025-10-01 06:21:16 +0000 UTC" firstStartedPulling="2025-10-01 06:21:18.160812459 +0000 UTC m=+279.570469558" lastFinishedPulling="2025-10-01 06:21:20.650253765 +0000 UTC m=+282.059910814" observedRunningTime="2025-10-01 06:21:21.222273699 +0000 UTC m=+282.631930748" watchObservedRunningTime="2025-10-01 06:21:21.224040523 +0000 UTC m=+282.633697582" Oct 01 06:21:22 crc kubenswrapper[4747]: I1001 06:21:22.212187 4747 generic.go:334] "Generic (PLEG): container finished" podID="2b622aa3-38b6-46f4-b2bc-91208b5ecf20" containerID="f7b6c1f435a0ea2d9dc860a4101da42c89f6e32b3ce099241f03eeb0e6fbfec9" exitCode=0 Oct 01 06:21:22 crc kubenswrapper[4747]: I1001 06:21:22.212261 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-27npp" event={"ID":"2b622aa3-38b6-46f4-b2bc-91208b5ecf20","Type":"ContainerDied","Data":"f7b6c1f435a0ea2d9dc860a4101da42c89f6e32b3ce099241f03eeb0e6fbfec9"} Oct 01 06:21:22 crc kubenswrapper[4747]: I1001 06:21:22.217655 4747 generic.go:334] "Generic (PLEG): container finished" podID="6e2e0a71-6f8f-49e7-b077-2dab23e591f2" containerID="aa7d26a52c2a3ff5de1b930d0f61dd0e26a66ff6b1eab0059361c6c372cfe4ae" exitCode=0 Oct 01 06:21:22 crc kubenswrapper[4747]: I1001 06:21:22.218855 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xksc" 
event={"ID":"6e2e0a71-6f8f-49e7-b077-2dab23e591f2","Type":"ContainerDied","Data":"aa7d26a52c2a3ff5de1b930d0f61dd0e26a66ff6b1eab0059361c6c372cfe4ae"} Oct 01 06:21:23 crc kubenswrapper[4747]: I1001 06:21:23.230253 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9xksc" event={"ID":"6e2e0a71-6f8f-49e7-b077-2dab23e591f2","Type":"ContainerStarted","Data":"68b1720bad87a08d6731ae7aacb617adedfab5ecb606a9aaf53c5c6ede1d30ea"} Oct 01 06:21:23 crc kubenswrapper[4747]: I1001 06:21:23.232496 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-27npp" event={"ID":"2b622aa3-38b6-46f4-b2bc-91208b5ecf20","Type":"ContainerStarted","Data":"91d37ade80db344549a2fc987e77f5dc25492843390ecadd3fb1fc59d3c2594b"} Oct 01 06:21:23 crc kubenswrapper[4747]: I1001 06:21:23.276185 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9xksc" podStartSLOduration=2.679929782 podStartE2EDuration="5.276164632s" podCreationTimestamp="2025-10-01 06:21:18 +0000 UTC" firstStartedPulling="2025-10-01 06:21:20.189994391 +0000 UTC m=+281.599651480" lastFinishedPulling="2025-10-01 06:21:22.786229291 +0000 UTC m=+284.195886330" observedRunningTime="2025-10-01 06:21:23.247565011 +0000 UTC m=+284.657222080" watchObservedRunningTime="2025-10-01 06:21:23.276164632 +0000 UTC m=+284.685821681" Oct 01 06:21:23 crc kubenswrapper[4747]: I1001 06:21:23.278251 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-27npp" podStartSLOduration=2.740608793 podStartE2EDuration="5.278242135s" podCreationTimestamp="2025-10-01 06:21:18 +0000 UTC" firstStartedPulling="2025-10-01 06:21:20.181558908 +0000 UTC m=+281.591215997" lastFinishedPulling="2025-10-01 06:21:22.71919229 +0000 UTC m=+284.128849339" observedRunningTime="2025-10-01 06:21:23.274608053 +0000 UTC m=+284.684265112" watchObservedRunningTime="2025-10-01 06:21:23.278242135 +0000 UTC m=+284.687899194" Oct 01 06:21:26 crc kubenswrapper[4747]: I1001 06:21:26.747372 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zh5bf" Oct 01 06:21:26 crc kubenswrapper[4747]: I1001 06:21:26.747696 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zh5bf" Oct 01 06:21:26 crc kubenswrapper[4747]: I1001 06:21:26.796069 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zh5bf" Oct 01 06:21:26 crc kubenswrapper[4747]: I1001 06:21:26.889468 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4bfp9" Oct 01 06:21:26 crc kubenswrapper[4747]: I1001 06:21:26.889536 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4bfp9" Oct 01 06:21:26 crc kubenswrapper[4747]: I1001 06:21:26.931832 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4bfp9" Oct 01 06:21:27 crc kubenswrapper[4747]: I1001 06:21:27.301255 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zh5bf" Oct 01 06:21:27 crc kubenswrapper[4747]: I1001 06:21:27.302781 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/certified-operators-4bfp9" Oct 01 06:21:29 crc kubenswrapper[4747]: I1001 06:21:29.152298 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-27npp" Oct 01 06:21:29 crc kubenswrapper[4747]: I1001 06:21:29.152357 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-27npp" Oct 01 06:21:29 crc kubenswrapper[4747]: I1001 06:21:29.193859 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-27npp" Oct 01 06:21:29 crc kubenswrapper[4747]: I1001 06:21:29.292659 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9xksc" Oct 01 06:21:29 crc kubenswrapper[4747]: I1001 06:21:29.293038 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9xksc" Oct 01 06:21:29 crc kubenswrapper[4747]: I1001 06:21:29.312597 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-27npp" Oct 01 06:21:29 crc kubenswrapper[4747]: I1001 06:21:29.352239 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9xksc" Oct 01 06:21:30 crc kubenswrapper[4747]: I1001 06:21:30.325424 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9xksc" Oct 01 06:22:35 crc kubenswrapper[4747]: I1001 06:22:35.761284 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:22:35 crc kubenswrapper[4747]: I1001 06:22:35.762187 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:23:05 crc kubenswrapper[4747]: I1001 06:23:05.761065 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:23:05 crc kubenswrapper[4747]: I1001 06:23:05.761867 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:23:35 crc kubenswrapper[4747]: I1001 06:23:35.761203 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:23:35 crc kubenswrapper[4747]: I1001 06:23:35.763613 4747 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:23:35 crc kubenswrapper[4747]: I1001 06:23:35.763930 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:23:35 crc kubenswrapper[4747]: I1001 06:23:35.765191 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b376cd219b804446b36aa80999cb1cd10015c3499940e5191b5df36c2cb92f37"} pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 06:23:35 crc kubenswrapper[4747]: I1001 06:23:35.765510 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" containerID="cri-o://b376cd219b804446b36aa80999cb1cd10015c3499940e5191b5df36c2cb92f37" gracePeriod=600 Oct 01 06:23:36 crc kubenswrapper[4747]: I1001 06:23:36.110098 4747 generic.go:334] "Generic (PLEG): container finished" podID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerID="b376cd219b804446b36aa80999cb1cd10015c3499940e5191b5df36c2cb92f37" exitCode=0 Oct 01 06:23:36 crc kubenswrapper[4747]: I1001 06:23:36.110180 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" event={"ID":"90df9e29-7482-4ab7-84c6-f3029df17a0d","Type":"ContainerDied","Data":"b376cd219b804446b36aa80999cb1cd10015c3499940e5191b5df36c2cb92f37"} Oct 01 06:23:36 crc kubenswrapper[4747]: I1001 06:23:36.110216 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" event={"ID":"90df9e29-7482-4ab7-84c6-f3029df17a0d","Type":"ContainerStarted","Data":"fb86108ac3eb45a3fbb1aa165dbc43fde8305d3de0de6e143c3c6aadae17a056"} Oct 01 06:23:36 crc kubenswrapper[4747]: I1001 06:23:36.110236 4747 scope.go:117] "RemoveContainer" containerID="f1f92129de573c3d25873c0eb1c94bef2f12eda520eac93ebf28b11592279721" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.036256 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-9pptq"] Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.037799 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.047861 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-9pptq"] Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.091010 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/60be877b-cb3d-4196-8552-1fec8a563d09-registry-certificates\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.091083 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.091126 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/60be877b-cb3d-4196-8552-1fec8a563d09-registry-tls\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.091187 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/60be877b-cb3d-4196-8552-1fec8a563d09-trusted-ca\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.091218 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/60be877b-cb3d-4196-8552-1fec8a563d09-ca-trust-extracted\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.091248 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/60be877b-cb3d-4196-8552-1fec8a563d09-installation-pull-secrets\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.091382 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8k6wx\" (UniqueName: \"kubernetes.io/projected/60be877b-cb3d-4196-8552-1fec8a563d09-kube-api-access-8k6wx\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.091431 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/60be877b-cb3d-4196-8552-1fec8a563d09-bound-sa-token\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.109475 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.192626 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/60be877b-cb3d-4196-8552-1fec8a563d09-registry-certificates\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.192684 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/60be877b-cb3d-4196-8552-1fec8a563d09-registry-tls\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.192729 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/60be877b-cb3d-4196-8552-1fec8a563d09-trusted-ca\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.192745 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/60be877b-cb3d-4196-8552-1fec8a563d09-ca-trust-extracted\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.192789 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/60be877b-cb3d-4196-8552-1fec8a563d09-installation-pull-secrets\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.192813 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8k6wx\" (UniqueName: \"kubernetes.io/projected/60be877b-cb3d-4196-8552-1fec8a563d09-kube-api-access-8k6wx\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.192829 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/60be877b-cb3d-4196-8552-1fec8a563d09-bound-sa-token\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.194150 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/60be877b-cb3d-4196-8552-1fec8a563d09-registry-certificates\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.194382 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/60be877b-cb3d-4196-8552-1fec8a563d09-trusted-ca\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.194412 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/60be877b-cb3d-4196-8552-1fec8a563d09-ca-trust-extracted\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.199812 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/60be877b-cb3d-4196-8552-1fec8a563d09-registry-tls\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.205041 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/60be877b-cb3d-4196-8552-1fec8a563d09-installation-pull-secrets\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.214155 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8k6wx\" (UniqueName: \"kubernetes.io/projected/60be877b-cb3d-4196-8552-1fec8a563d09-kube-api-access-8k6wx\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.216997 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/60be877b-cb3d-4196-8552-1fec8a563d09-bound-sa-token\") pod \"image-registry-66df7c8f76-9pptq\" (UID: \"60be877b-cb3d-4196-8552-1fec8a563d09\") " pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.353824 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:23:59 crc kubenswrapper[4747]: I1001 06:23:59.585482 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-9pptq"] Oct 01 06:23:59 crc kubenswrapper[4747]: W1001 06:23:59.592191 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60be877b_cb3d_4196_8552_1fec8a563d09.slice/crio-9924734a391f8f4b82bef3728648da312ac10c1e7857beb554ef349a94d30248 WatchSource:0}: Error finding container 9924734a391f8f4b82bef3728648da312ac10c1e7857beb554ef349a94d30248: Status 404 returned error can't find the container with id 9924734a391f8f4b82bef3728648da312ac10c1e7857beb554ef349a94d30248 Oct 01 06:24:00 crc kubenswrapper[4747]: I1001 06:24:00.300063 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" event={"ID":"60be877b-cb3d-4196-8552-1fec8a563d09","Type":"ContainerStarted","Data":"daae4ac8e6c09ba1380e74d00440791aa720ca947949ed90bfe0de38374eead4"} Oct 01 06:24:00 crc kubenswrapper[4747]: I1001 06:24:00.300615 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" event={"ID":"60be877b-cb3d-4196-8552-1fec8a563d09","Type":"ContainerStarted","Data":"9924734a391f8f4b82bef3728648da312ac10c1e7857beb554ef349a94d30248"} Oct 01 06:24:00 crc kubenswrapper[4747]: I1001 06:24:00.301982 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:24:00 crc kubenswrapper[4747]: I1001 06:24:00.334166 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" podStartSLOduration=1.334141867 podStartE2EDuration="1.334141867s" podCreationTimestamp="2025-10-01 06:23:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:24:00.330860353 +0000 UTC m=+441.740517442" watchObservedRunningTime="2025-10-01 06:24:00.334141867 +0000 UTC m=+441.743798956" Oct 01 06:24:19 crc kubenswrapper[4747]: I1001 06:24:19.361633 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-9pptq" Oct 01 06:24:19 crc kubenswrapper[4747]: I1001 06:24:19.429969 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8mc8d"] Oct 01 06:24:44 crc kubenswrapper[4747]: I1001 06:24:44.482965 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" podUID="5de5adaf-4595-446d-9b77-a48824db2dfa" containerName="registry" containerID="cri-o://d810755b1081d5705b83256f16e3da7918be00f96b001f5de206fa0af0d3e379" gracePeriod=30 Oct 01 06:24:44 crc kubenswrapper[4747]: I1001 06:24:44.654136 4747 generic.go:334] "Generic (PLEG): container finished" podID="5de5adaf-4595-446d-9b77-a48824db2dfa" containerID="d810755b1081d5705b83256f16e3da7918be00f96b001f5de206fa0af0d3e379" exitCode=0 Oct 01 06:24:44 crc kubenswrapper[4747]: I1001 06:24:44.654202 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" 
event={"ID":"5de5adaf-4595-446d-9b77-a48824db2dfa","Type":"ContainerDied","Data":"d810755b1081d5705b83256f16e3da7918be00f96b001f5de206fa0af0d3e379"} Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.058220 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.207089 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5de5adaf-4595-446d-9b77-a48824db2dfa-ca-trust-extracted\") pod \"5de5adaf-4595-446d-9b77-a48824db2dfa\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.207189 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5de5adaf-4595-446d-9b77-a48824db2dfa-installation-pull-secrets\") pod \"5de5adaf-4595-446d-9b77-a48824db2dfa\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.207431 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"5de5adaf-4595-446d-9b77-a48824db2dfa\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.207496 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5de5adaf-4595-446d-9b77-a48824db2dfa-trusted-ca\") pod \"5de5adaf-4595-446d-9b77-a48824db2dfa\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.207590 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5de5adaf-4595-446d-9b77-a48824db2dfa-registry-certificates\") pod \"5de5adaf-4595-446d-9b77-a48824db2dfa\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.207644 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5de5adaf-4595-446d-9b77-a48824db2dfa-bound-sa-token\") pod \"5de5adaf-4595-446d-9b77-a48824db2dfa\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.207676 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5de5adaf-4595-446d-9b77-a48824db2dfa-registry-tls\") pod \"5de5adaf-4595-446d-9b77-a48824db2dfa\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.207720 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jg6f\" (UniqueName: \"kubernetes.io/projected/5de5adaf-4595-446d-9b77-a48824db2dfa-kube-api-access-7jg6f\") pod \"5de5adaf-4595-446d-9b77-a48824db2dfa\" (UID: \"5de5adaf-4595-446d-9b77-a48824db2dfa\") " Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.209286 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5de5adaf-4595-446d-9b77-a48824db2dfa-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "5de5adaf-4595-446d-9b77-a48824db2dfa" (UID: 
"5de5adaf-4595-446d-9b77-a48824db2dfa"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.209471 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5de5adaf-4595-446d-9b77-a48824db2dfa-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "5de5adaf-4595-446d-9b77-a48824db2dfa" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.217401 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5de5adaf-4595-446d-9b77-a48824db2dfa-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "5de5adaf-4595-446d-9b77-a48824db2dfa" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.217463 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5de5adaf-4595-446d-9b77-a48824db2dfa-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "5de5adaf-4595-446d-9b77-a48824db2dfa" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.217302 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5de5adaf-4595-446d-9b77-a48824db2dfa-kube-api-access-7jg6f" (OuterVolumeSpecName: "kube-api-access-7jg6f") pod "5de5adaf-4595-446d-9b77-a48824db2dfa" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa"). InnerVolumeSpecName "kube-api-access-7jg6f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.218934 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5de5adaf-4595-446d-9b77-a48824db2dfa-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "5de5adaf-4595-446d-9b77-a48824db2dfa" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.224607 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "5de5adaf-4595-446d-9b77-a48824db2dfa" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.239676 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5de5adaf-4595-446d-9b77-a48824db2dfa-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "5de5adaf-4595-446d-9b77-a48824db2dfa" (UID: "5de5adaf-4595-446d-9b77-a48824db2dfa"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.308925 4747 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5de5adaf-4595-446d-9b77-a48824db2dfa-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.308969 4747 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5de5adaf-4595-446d-9b77-a48824db2dfa-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.308990 4747 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5de5adaf-4595-446d-9b77-a48824db2dfa-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.309009 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jg6f\" (UniqueName: \"kubernetes.io/projected/5de5adaf-4595-446d-9b77-a48824db2dfa-kube-api-access-7jg6f\") on node \"crc\" DevicePath \"\"" Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.309029 4747 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5de5adaf-4595-446d-9b77-a48824db2dfa-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.309048 4747 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5de5adaf-4595-446d-9b77-a48824db2dfa-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.309067 4747 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5de5adaf-4595-446d-9b77-a48824db2dfa-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.663520 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" event={"ID":"5de5adaf-4595-446d-9b77-a48824db2dfa","Type":"ContainerDied","Data":"5ebfae64562aa19f8c46fad1b968dfc5f6e821b62855d0a3425667f3f8c508c1"} Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.663601 4747 scope.go:117] "RemoveContainer" containerID="d810755b1081d5705b83256f16e3da7918be00f96b001f5de206fa0af0d3e379" Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.663597 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-8mc8d" Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.700229 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8mc8d"] Oct 01 06:24:45 crc kubenswrapper[4747]: I1001 06:24:45.702534 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-8mc8d"] Oct 01 06:24:47 crc kubenswrapper[4747]: I1001 06:24:47.288438 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5de5adaf-4595-446d-9b77-a48824db2dfa" path="/var/lib/kubelet/pods/5de5adaf-4595-446d-9b77-a48824db2dfa/volumes" Oct 01 06:26:05 crc kubenswrapper[4747]: I1001 06:26:05.761386 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:26:05 crc kubenswrapper[4747]: I1001 06:26:05.762283 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:26:35 crc kubenswrapper[4747]: I1001 06:26:35.761686 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:26:35 crc kubenswrapper[4747]: I1001 06:26:35.762684 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:26:36 crc kubenswrapper[4747]: I1001 06:26:36.973607 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p48hw"] Oct 01 06:26:36 crc kubenswrapper[4747]: I1001 06:26:36.974563 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovn-controller" containerID="cri-o://101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4" gracePeriod=30 Oct 01 06:26:36 crc kubenswrapper[4747]: I1001 06:26:36.974637 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" podUID="38849139-d385-42a4-adab-687566065973" containerName="nbdb" containerID="cri-o://21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e" gracePeriod=30 Oct 01 06:26:36 crc kubenswrapper[4747]: I1001 06:26:36.974699 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" podUID="38849139-d385-42a4-adab-687566065973" containerName="kube-rbac-proxy-node" containerID="cri-o://a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537" gracePeriod=30 Oct 01 06:26:36 crc kubenswrapper[4747]: I1001 06:26:36.974744 4747 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" podUID="38849139-d385-42a4-adab-687566065973" containerName="northd" containerID="cri-o://a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5" gracePeriod=30 Oct 01 06:26:36 crc kubenswrapper[4747]: I1001 06:26:36.974693 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" podUID="38849139-d385-42a4-adab-687566065973" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4" gracePeriod=30 Oct 01 06:26:36 crc kubenswrapper[4747]: I1001 06:26:36.974723 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovn-acl-logging" containerID="cri-o://095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d" gracePeriod=30 Oct 01 06:26:36 crc kubenswrapper[4747]: I1001 06:26:36.975032 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" podUID="38849139-d385-42a4-adab-687566065973" containerName="sbdb" containerID="cri-o://90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776" gracePeriod=30 Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.018640 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovnkube-controller" containerID="cri-o://802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0" gracePeriod=30 Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.280810 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p48hw_38849139-d385-42a4-adab-687566065973/ovnkube-controller/3.log" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.283420 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p48hw_38849139-d385-42a4-adab-687566065973/ovn-acl-logging/0.log" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.285465 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p48hw_38849139-d385-42a4-adab-687566065973/ovn-controller/0.log" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.286176 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.333250 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-hdnnp"] Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.333612 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovn-controller" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.333672 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovn-controller" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.333723 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38849139-d385-42a4-adab-687566065973" containerName="kube-rbac-proxy-node" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.333789 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="38849139-d385-42a4-adab-687566065973" containerName="kube-rbac-proxy-node" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.333839 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38849139-d385-42a4-adab-687566065973" containerName="nbdb" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.333890 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="38849139-d385-42a4-adab-687566065973" containerName="nbdb" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.333939 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovnkube-controller" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.334006 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovnkube-controller" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.334057 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38849139-d385-42a4-adab-687566065973" containerName="kubecfg-setup" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.334103 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="38849139-d385-42a4-adab-687566065973" containerName="kubecfg-setup" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.334150 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5de5adaf-4595-446d-9b77-a48824db2dfa" containerName="registry" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.334195 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="5de5adaf-4595-446d-9b77-a48824db2dfa" containerName="registry" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.334245 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38849139-d385-42a4-adab-687566065973" containerName="sbdb" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.334290 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="38849139-d385-42a4-adab-687566065973" containerName="sbdb" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.334338 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovnkube-controller" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.334383 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovnkube-controller" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.334443 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38849139-d385-42a4-adab-687566065973" 
containerName="ovnkube-controller" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.334491 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovnkube-controller" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.334542 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38849139-d385-42a4-adab-687566065973" containerName="northd" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.334594 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="38849139-d385-42a4-adab-687566065973" containerName="northd" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.334642 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovnkube-controller" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.334701 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovnkube-controller" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.334766 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38849139-d385-42a4-adab-687566065973" containerName="kube-rbac-proxy-ovn-metrics" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.334813 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="38849139-d385-42a4-adab-687566065973" containerName="kube-rbac-proxy-ovn-metrics" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.334865 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovn-acl-logging" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.334909 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovn-acl-logging" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.335049 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="38849139-d385-42a4-adab-687566065973" containerName="kube-rbac-proxy-ovn-metrics" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.335173 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovn-controller" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.335225 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="5de5adaf-4595-446d-9b77-a48824db2dfa" containerName="registry" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.335273 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="38849139-d385-42a4-adab-687566065973" containerName="northd" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.335336 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="38849139-d385-42a4-adab-687566065973" containerName="sbdb" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.335389 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="38849139-d385-42a4-adab-687566065973" containerName="kube-rbac-proxy-node" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.335432 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovnkube-controller" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.335485 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovnkube-controller" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.335532 4747 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovnkube-controller" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.335581 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovn-acl-logging" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.335627 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovnkube-controller" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.335672 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovnkube-controller" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.335719 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="38849139-d385-42a4-adab-687566065973" containerName="nbdb" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.335871 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovnkube-controller" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.335925 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="38849139-d385-42a4-adab-687566065973" containerName="ovnkube-controller" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.337549 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.431474 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p48hw_38849139-d385-42a4-adab-687566065973/ovnkube-controller/3.log" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.434501 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p48hw_38849139-d385-42a4-adab-687566065973/ovn-acl-logging/0.log" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.435147 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-p48hw_38849139-d385-42a4-adab-687566065973/ovn-controller/0.log" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.435614 4747 generic.go:334] "Generic (PLEG): container finished" podID="38849139-d385-42a4-adab-687566065973" containerID="802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0" exitCode=0 Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.435763 4747 generic.go:334] "Generic (PLEG): container finished" podID="38849139-d385-42a4-adab-687566065973" containerID="90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776" exitCode=0 Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.435729 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerDied","Data":"802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.436862 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerDied","Data":"90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.436928 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" 
event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerDied","Data":"21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.436919 4747 scope.go:117] "RemoveContainer" containerID="802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.435880 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.436741 4747 generic.go:334] "Generic (PLEG): container finished" podID="38849139-d385-42a4-adab-687566065973" containerID="21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e" exitCode=0 Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.437826 4747 generic.go:334] "Generic (PLEG): container finished" podID="38849139-d385-42a4-adab-687566065973" containerID="a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5" exitCode=0 Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.437867 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerDied","Data":"a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.437933 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerDied","Data":"1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.437886 4747 generic.go:334] "Generic (PLEG): container finished" podID="38849139-d385-42a4-adab-687566065973" containerID="1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4" exitCode=0 Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.437979 4747 generic.go:334] "Generic (PLEG): container finished" podID="38849139-d385-42a4-adab-687566065973" containerID="a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537" exitCode=0 Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438000 4747 generic.go:334] "Generic (PLEG): container finished" podID="38849139-d385-42a4-adab-687566065973" containerID="095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d" exitCode=143 Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438014 4747 generic.go:334] "Generic (PLEG): container finished" podID="38849139-d385-42a4-adab-687566065973" containerID="101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4" exitCode=143 Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438091 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerDied","Data":"a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438159 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438178 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776"} Oct 01 06:26:37 crc kubenswrapper[4747]: 
I1001 06:26:37.438191 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438203 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438213 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438224 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438236 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438247 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438258 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438278 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerDied","Data":"095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438299 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438313 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438325 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438335 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438346 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438356 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4"} Oct 01 06:26:37 crc kubenswrapper[4747]: 
I1001 06:26:37.438368 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438378 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438391 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438402 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438416 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerDied","Data":"101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438432 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438444 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438456 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438467 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438478 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438513 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438524 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438534 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438545 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4"} Oct 01 06:26:37 crc kubenswrapper[4747]: 
I1001 06:26:37.438555 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438569 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-p48hw" event={"ID":"38849139-d385-42a4-adab-687566065973","Type":"ContainerDied","Data":"c15abffcb32aae9b0c99d930b994d794778fa732156bdf5595f6462cbcbea056"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438585 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438597 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438608 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438619 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438629 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438641 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438651 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438661 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438672 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.438682 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.445583 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pvmkj_e2f2b780-a19d-4581-92f4-ca25c69a263c/kube-multus/2.log" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.447857 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pvmkj_e2f2b780-a19d-4581-92f4-ca25c69a263c/kube-multus/1.log" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.448008 4747 generic.go:334] "Generic (PLEG): container 
finished" podID="e2f2b780-a19d-4581-92f4-ca25c69a263c" containerID="a3b36be96e97a5e64fec3b1e8acc8ceb3e2ab46403b34da703e1f97a3e702b3d" exitCode=2 Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.448103 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pvmkj" event={"ID":"e2f2b780-a19d-4581-92f4-ca25c69a263c","Type":"ContainerDied","Data":"a3b36be96e97a5e64fec3b1e8acc8ceb3e2ab46403b34da703e1f97a3e702b3d"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.448142 4747 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9ddd1dfd830b83669b9797e0821bee7b4e6f0f46b87b6e6c315085b8a0bcbea5"} Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.449641 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-node-log\") pod \"38849139-d385-42a4-adab-687566065973\" (UID: \"38849139-d385-42a4-adab-687566065973\") " Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.449704 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-kubelet\") pod \"38849139-d385-42a4-adab-687566065973\" (UID: \"38849139-d385-42a4-adab-687566065973\") " Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.449789 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/38849139-d385-42a4-adab-687566065973-ovnkube-script-lib\") pod \"38849139-d385-42a4-adab-687566065973\" (UID: \"38849139-d385-42a4-adab-687566065973\") " Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.449832 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-etc-openvswitch\") pod \"38849139-d385-42a4-adab-687566065973\" (UID: \"38849139-d385-42a4-adab-687566065973\") " Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.449866 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-run-netns\") pod \"38849139-d385-42a4-adab-687566065973\" (UID: \"38849139-d385-42a4-adab-687566065973\") " Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.449892 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-log-socket\") pod \"38849139-d385-42a4-adab-687566065973\" (UID: \"38849139-d385-42a4-adab-687566065973\") " Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.449885 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-node-log" (OuterVolumeSpecName: "node-log") pod "38849139-d385-42a4-adab-687566065973" (UID: "38849139-d385-42a4-adab-687566065973"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.449920 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-var-lib-openvswitch\") pod \"38849139-d385-42a4-adab-687566065973\" (UID: \"38849139-d385-42a4-adab-687566065973\") " Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.449950 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-cni-netd\") pod \"38849139-d385-42a4-adab-687566065973\" (UID: \"38849139-d385-42a4-adab-687566065973\") " Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.449957 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "38849139-d385-42a4-adab-687566065973" (UID: "38849139-d385-42a4-adab-687566065973"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.449990 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zjbg\" (UniqueName: \"kubernetes.io/projected/38849139-d385-42a4-adab-687566065973-kube-api-access-2zjbg\") pod \"38849139-d385-42a4-adab-687566065973\" (UID: \"38849139-d385-42a4-adab-687566065973\") " Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.449999 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "38849139-d385-42a4-adab-687566065973" (UID: "38849139-d385-42a4-adab-687566065973"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.450027 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/38849139-d385-42a4-adab-687566065973-ovnkube-config\") pod \"38849139-d385-42a4-adab-687566065973\" (UID: \"38849139-d385-42a4-adab-687566065973\") " Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.450036 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "38849139-d385-42a4-adab-687566065973" (UID: "38849139-d385-42a4-adab-687566065973"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.450061 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-run-ovn\") pod \"38849139-d385-42a4-adab-687566065973\" (UID: \"38849139-d385-42a4-adab-687566065973\") " Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.450071 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "38849139-d385-42a4-adab-687566065973" (UID: "38849139-d385-42a4-adab-687566065973"). 
InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.450097 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-run-openvswitch\") pod \"38849139-d385-42a4-adab-687566065973\" (UID: \"38849139-d385-42a4-adab-687566065973\") " Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.450106 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-log-socket" (OuterVolumeSpecName: "log-socket") pod "38849139-d385-42a4-adab-687566065973" (UID: "38849139-d385-42a4-adab-687566065973"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.450170 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-slash\") pod \"38849139-d385-42a4-adab-687566065973\" (UID: \"38849139-d385-42a4-adab-687566065973\") " Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.450215 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-cni-bin\") pod \"38849139-d385-42a4-adab-687566065973\" (UID: \"38849139-d385-42a4-adab-687566065973\") " Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.450254 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/38849139-d385-42a4-adab-687566065973-ovn-node-metrics-cert\") pod \"38849139-d385-42a4-adab-687566065973\" (UID: \"38849139-d385-42a4-adab-687566065973\") " Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.450288 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-systemd-units\") pod \"38849139-d385-42a4-adab-687566065973\" (UID: \"38849139-d385-42a4-adab-687566065973\") " Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.450316 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/38849139-d385-42a4-adab-687566065973-env-overrides\") pod \"38849139-d385-42a4-adab-687566065973\" (UID: \"38849139-d385-42a4-adab-687566065973\") " Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.450348 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-run-ovn-kubernetes\") pod \"38849139-d385-42a4-adab-687566065973\" (UID: \"38849139-d385-42a4-adab-687566065973\") " Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.450374 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-run-systemd\") pod \"38849139-d385-42a4-adab-687566065973\" (UID: \"38849139-d385-42a4-adab-687566065973\") " Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.450405 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-var-lib-cni-networks-ovn-kubernetes\") pod \"38849139-d385-42a4-adab-687566065973\" (UID: \"38849139-d385-42a4-adab-687566065973\") " Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.450527 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "38849139-d385-42a4-adab-687566065973" (UID: "38849139-d385-42a4-adab-687566065973"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.450588 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-log-socket\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.450686 4747 scope.go:117] "RemoveContainer" containerID="a3b36be96e97a5e64fec3b1e8acc8ceb3e2ab46403b34da703e1f97a3e702b3d" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.451072 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38849139-d385-42a4-adab-687566065973-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "38849139-d385-42a4-adab-687566065973" (UID: "38849139-d385-42a4-adab-687566065973"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.451213 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "38849139-d385-42a4-adab-687566065973" (UID: "38849139-d385-42a4-adab-687566065973"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.451284 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "38849139-d385-42a4-adab-687566065973" (UID: "38849139-d385-42a4-adab-687566065973"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.451368 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-slash" (OuterVolumeSpecName: "host-slash") pod "38849139-d385-42a4-adab-687566065973" (UID: "38849139-d385-42a4-adab-687566065973"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.451457 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "38849139-d385-42a4-adab-687566065973" (UID: "38849139-d385-42a4-adab-687566065973"). InnerVolumeSpecName "host-run-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.451483 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-pvmkj_openshift-multus(e2f2b780-a19d-4581-92f4-ca25c69a263c)\"" pod="openshift-multus/multus-pvmkj" podUID="e2f2b780-a19d-4581-92f4-ca25c69a263c" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.451568 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38849139-d385-42a4-adab-687566065973-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "38849139-d385-42a4-adab-687566065973" (UID: "38849139-d385-42a4-adab-687566065973"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.451593 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "38849139-d385-42a4-adab-687566065973" (UID: "38849139-d385-42a4-adab-687566065973"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.452130 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38849139-d385-42a4-adab-687566065973-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "38849139-d385-42a4-adab-687566065973" (UID: "38849139-d385-42a4-adab-687566065973"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.452248 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "38849139-d385-42a4-adab-687566065973" (UID: "38849139-d385-42a4-adab-687566065973"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.452279 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plk4n\" (UniqueName: \"kubernetes.io/projected/e9109090-8aaa-43bd-8510-ede77da3f047-kube-api-access-plk4n\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.452457 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-etc-openvswitch\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.452559 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-node-log\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.452666 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-host-slash\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.452808 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.452889 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-run-openvswitch\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.452982 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-host-run-ovn-kubernetes\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.453062 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-var-lib-openvswitch\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.453163 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" 
(UniqueName: \"kubernetes.io/configmap/e9109090-8aaa-43bd-8510-ede77da3f047-ovnkube-script-lib\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.453300 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e9109090-8aaa-43bd-8510-ede77da3f047-ovn-node-metrics-cert\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.453401 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-systemd-units\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.453474 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e9109090-8aaa-43bd-8510-ede77da3f047-env-overrides\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.453573 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-run-systemd\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.453663 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-run-ovn\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.453833 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-host-cni-netd\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.453952 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-host-run-netns\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.454031 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-host-kubelet\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.454169 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-host-cni-bin\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.454253 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e9109090-8aaa-43bd-8510-ede77da3f047-ovnkube-config\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.454357 4747 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-systemd-units\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.454418 4747 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/38849139-d385-42a4-adab-687566065973-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.454474 4747 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.454560 4747 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-node-log\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.454627 4747 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-kubelet\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.454694 4747 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/38849139-d385-42a4-adab-687566065973-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.454780 4747 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.454875 4747 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-run-netns\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.454955 4747 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-cni-netd\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.455027 4747 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-log-socket\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.455103 4747 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.455182 4747 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/38849139-d385-42a4-adab-687566065973-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.455260 4747 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.455333 4747 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-run-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.455396 4747 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-slash\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.455451 4747 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-cni-bin\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.455681 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38849139-d385-42a4-adab-687566065973-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "38849139-d385-42a4-adab-687566065973" (UID: "38849139-d385-42a4-adab-687566065973"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.455786 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "38849139-d385-42a4-adab-687566065973" (UID: "38849139-d385-42a4-adab-687566065973"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.456499 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38849139-d385-42a4-adab-687566065973-kube-api-access-2zjbg" (OuterVolumeSpecName: "kube-api-access-2zjbg") pod "38849139-d385-42a4-adab-687566065973" (UID: "38849139-d385-42a4-adab-687566065973"). InnerVolumeSpecName "kube-api-access-2zjbg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.459677 4747 scope.go:117] "RemoveContainer" containerID="820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.478444 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "38849139-d385-42a4-adab-687566065973" (UID: "38849139-d385-42a4-adab-687566065973"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.481879 4747 scope.go:117] "RemoveContainer" containerID="90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.495230 4747 scope.go:117] "RemoveContainer" containerID="21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.506290 4747 scope.go:117] "RemoveContainer" containerID="a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.516671 4747 scope.go:117] "RemoveContainer" containerID="1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.539443 4747 scope.go:117] "RemoveContainer" containerID="a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.553792 4747 scope.go:117] "RemoveContainer" containerID="095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.556913 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-run-systemd\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.556960 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-run-ovn\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.556989 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-host-cni-netd\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557017 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-host-run-netns\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557038 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-host-kubelet\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557071 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-host-cni-bin\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557093 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e9109090-8aaa-43bd-8510-ede77da3f047-ovnkube-config\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557097 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-run-ovn\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557117 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-log-socket\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557151 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-run-systemd\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557169 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plk4n\" (UniqueName: \"kubernetes.io/projected/e9109090-8aaa-43bd-8510-ede77da3f047-kube-api-access-plk4n\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557187 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-host-cni-bin\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557196 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-etc-openvswitch\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557221 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-host-cni-netd\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557223 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-node-log\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557265 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-node-log\") pod \"ovnkube-node-hdnnp\" (UID: 
\"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557269 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-host-slash\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557312 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-host-run-netns\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557327 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557345 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-host-kubelet\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557353 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-run-openvswitch\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557380 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-host-run-ovn-kubernetes\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557404 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-var-lib-openvswitch\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557424 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e9109090-8aaa-43bd-8510-ede77da3f047-ovnkube-script-lib\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557468 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e9109090-8aaa-43bd-8510-ede77da3f047-ovn-node-metrics-cert\") pod \"ovnkube-node-hdnnp\" (UID: 
\"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557495 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-systemd-units\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557516 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e9109090-8aaa-43bd-8510-ede77da3f047-env-overrides\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557580 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zjbg\" (UniqueName: \"kubernetes.io/projected/38849139-d385-42a4-adab-687566065973-kube-api-access-2zjbg\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557598 4747 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/38849139-d385-42a4-adab-687566065973-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557613 4747 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-run-systemd\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557626 4747 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/38849139-d385-42a4-adab-687566065973-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.558044 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-etc-openvswitch\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.557291 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-host-slash\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.558097 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.558197 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e9109090-8aaa-43bd-8510-ede77da3f047-ovnkube-config\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 
01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.558241 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-log-socket\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.558391 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-run-openvswitch\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.558435 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-systemd-units\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.558586 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-host-run-ovn-kubernetes\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.558660 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/e9109090-8aaa-43bd-8510-ede77da3f047-var-lib-openvswitch\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.559288 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e9109090-8aaa-43bd-8510-ede77da3f047-env-overrides\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.559971 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/e9109090-8aaa-43bd-8510-ede77da3f047-ovnkube-script-lib\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.561552 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e9109090-8aaa-43bd-8510-ede77da3f047-ovn-node-metrics-cert\") pod \"ovnkube-node-hdnnp\" (UID: \"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.565875 4747 scope.go:117] "RemoveContainer" containerID="101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.576161 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plk4n\" (UniqueName: \"kubernetes.io/projected/e9109090-8aaa-43bd-8510-ede77da3f047-kube-api-access-plk4n\") pod \"ovnkube-node-hdnnp\" (UID: 
\"e9109090-8aaa-43bd-8510-ede77da3f047\") " pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.578961 4747 scope.go:117] "RemoveContainer" containerID="707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.593522 4747 scope.go:117] "RemoveContainer" containerID="802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.593945 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0\": container with ID starting with 802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0 not found: ID does not exist" containerID="802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.594045 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0"} err="failed to get container status \"802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0\": rpc error: code = NotFound desc = could not find container \"802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0\": container with ID starting with 802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.594135 4747 scope.go:117] "RemoveContainer" containerID="820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.594499 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497\": container with ID starting with 820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497 not found: ID does not exist" containerID="820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.594606 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497"} err="failed to get container status \"820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497\": rpc error: code = NotFound desc = could not find container \"820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497\": container with ID starting with 820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.594678 4747 scope.go:117] "RemoveContainer" containerID="90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.595082 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\": container with ID starting with 90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776 not found: ID does not exist" containerID="90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.595157 4747 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776"} err="failed to get container status \"90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\": rpc error: code = NotFound desc = could not find container \"90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\": container with ID starting with 90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.595306 4747 scope.go:117] "RemoveContainer" containerID="21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.595613 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\": container with ID starting with 21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e not found: ID does not exist" containerID="21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.595701 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e"} err="failed to get container status \"21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\": rpc error: code = NotFound desc = could not find container \"21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\": container with ID starting with 21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.595787 4747 scope.go:117] "RemoveContainer" containerID="a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.596060 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\": container with ID starting with a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5 not found: ID does not exist" containerID="a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.596090 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5"} err="failed to get container status \"a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\": rpc error: code = NotFound desc = could not find container \"a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\": container with ID starting with a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.596109 4747 scope.go:117] "RemoveContainer" containerID="1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.596338 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\": container with ID starting with 1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4 not found: ID does not exist" 
containerID="1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.596423 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4"} err="failed to get container status \"1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\": rpc error: code = NotFound desc = could not find container \"1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\": container with ID starting with 1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.596486 4747 scope.go:117] "RemoveContainer" containerID="a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.596785 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\": container with ID starting with a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537 not found: ID does not exist" containerID="a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.596861 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537"} err="failed to get container status \"a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\": rpc error: code = NotFound desc = could not find container \"a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\": container with ID starting with a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.596931 4747 scope.go:117] "RemoveContainer" containerID="095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.597146 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\": container with ID starting with 095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d not found: ID does not exist" containerID="095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.597227 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d"} err="failed to get container status \"095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\": rpc error: code = NotFound desc = could not find container \"095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\": container with ID starting with 095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.597284 4747 scope.go:117] "RemoveContainer" containerID="101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.597497 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\": container with ID starting with 101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4 not found: ID does not exist" containerID="101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.597574 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4"} err="failed to get container status \"101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\": rpc error: code = NotFound desc = could not find container \"101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\": container with ID starting with 101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.597631 4747 scope.go:117] "RemoveContainer" containerID="707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d" Oct 01 06:26:37 crc kubenswrapper[4747]: E1001 06:26:37.598142 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\": container with ID starting with 707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d not found: ID does not exist" containerID="707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.598231 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d"} err="failed to get container status \"707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\": rpc error: code = NotFound desc = could not find container \"707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\": container with ID starting with 707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.598292 4747 scope.go:117] "RemoveContainer" containerID="802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.598641 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0"} err="failed to get container status \"802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0\": rpc error: code = NotFound desc = could not find container \"802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0\": container with ID starting with 802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.598666 4747 scope.go:117] "RemoveContainer" containerID="820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.599025 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497"} err="failed to get container status \"820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497\": rpc error: code = NotFound desc = could not find container \"820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497\": container with ID starting with 
820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.599115 4747 scope.go:117] "RemoveContainer" containerID="90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.599426 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776"} err="failed to get container status \"90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\": rpc error: code = NotFound desc = could not find container \"90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\": container with ID starting with 90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.599510 4747 scope.go:117] "RemoveContainer" containerID="21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.599930 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e"} err="failed to get container status \"21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\": rpc error: code = NotFound desc = could not find container \"21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\": container with ID starting with 21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.600011 4747 scope.go:117] "RemoveContainer" containerID="a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.600466 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5"} err="failed to get container status \"a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\": rpc error: code = NotFound desc = could not find container \"a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\": container with ID starting with a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.600543 4747 scope.go:117] "RemoveContainer" containerID="1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.600948 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4"} err="failed to get container status \"1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\": rpc error: code = NotFound desc = could not find container \"1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\": container with ID starting with 1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.601059 4747 scope.go:117] "RemoveContainer" containerID="a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.601376 4747 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537"} err="failed to get container status \"a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\": rpc error: code = NotFound desc = could not find container \"a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\": container with ID starting with a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.601490 4747 scope.go:117] "RemoveContainer" containerID="095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.602100 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d"} err="failed to get container status \"095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\": rpc error: code = NotFound desc = could not find container \"095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\": container with ID starting with 095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.602122 4747 scope.go:117] "RemoveContainer" containerID="101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.602900 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4"} err="failed to get container status \"101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\": rpc error: code = NotFound desc = could not find container \"101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\": container with ID starting with 101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.602963 4747 scope.go:117] "RemoveContainer" containerID="707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.603366 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d"} err="failed to get container status \"707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\": rpc error: code = NotFound desc = could not find container \"707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\": container with ID starting with 707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.603393 4747 scope.go:117] "RemoveContainer" containerID="802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.603731 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0"} err="failed to get container status \"802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0\": rpc error: code = NotFound desc = could not find container \"802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0\": container with ID starting with 802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0 not found: ID does not exist" Oct 
01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.603770 4747 scope.go:117] "RemoveContainer" containerID="820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.604020 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497"} err="failed to get container status \"820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497\": rpc error: code = NotFound desc = could not find container \"820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497\": container with ID starting with 820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.604114 4747 scope.go:117] "RemoveContainer" containerID="90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.604619 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776"} err="failed to get container status \"90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\": rpc error: code = NotFound desc = could not find container \"90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\": container with ID starting with 90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.604644 4747 scope.go:117] "RemoveContainer" containerID="21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.604958 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e"} err="failed to get container status \"21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\": rpc error: code = NotFound desc = could not find container \"21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\": container with ID starting with 21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.605055 4747 scope.go:117] "RemoveContainer" containerID="a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.605345 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5"} err="failed to get container status \"a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\": rpc error: code = NotFound desc = could not find container \"a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\": container with ID starting with a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.605367 4747 scope.go:117] "RemoveContainer" containerID="1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.605781 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4"} err="failed to get container status 
\"1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\": rpc error: code = NotFound desc = could not find container \"1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\": container with ID starting with 1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.605875 4747 scope.go:117] "RemoveContainer" containerID="a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.606354 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537"} err="failed to get container status \"a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\": rpc error: code = NotFound desc = could not find container \"a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\": container with ID starting with a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.606438 4747 scope.go:117] "RemoveContainer" containerID="095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.606840 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d"} err="failed to get container status \"095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\": rpc error: code = NotFound desc = could not find container \"095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\": container with ID starting with 095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.606890 4747 scope.go:117] "RemoveContainer" containerID="101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.607227 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4"} err="failed to get container status \"101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\": rpc error: code = NotFound desc = could not find container \"101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\": container with ID starting with 101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.607337 4747 scope.go:117] "RemoveContainer" containerID="707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.607848 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d"} err="failed to get container status \"707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\": rpc error: code = NotFound desc = could not find container \"707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\": container with ID starting with 707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.607894 4747 scope.go:117] "RemoveContainer" 
containerID="802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.608289 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0"} err="failed to get container status \"802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0\": rpc error: code = NotFound desc = could not find container \"802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0\": container with ID starting with 802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.608314 4747 scope.go:117] "RemoveContainer" containerID="820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.608617 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497"} err="failed to get container status \"820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497\": rpc error: code = NotFound desc = could not find container \"820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497\": container with ID starting with 820cfbb14222db2849843e2a264a620b6d820df6b2b11483d0795dfe5fc58497 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.608701 4747 scope.go:117] "RemoveContainer" containerID="90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.609049 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776"} err="failed to get container status \"90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\": rpc error: code = NotFound desc = could not find container \"90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776\": container with ID starting with 90dd6cf48711b41b67545872445ad7fdf65a78ce48b40651604bc2165035b776 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.609134 4747 scope.go:117] "RemoveContainer" containerID="21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.609468 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e"} err="failed to get container status \"21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\": rpc error: code = NotFound desc = could not find container \"21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e\": container with ID starting with 21cfc2808175b5aacd880bdab1069a0c7cce3bbe11f7a0bc97ea3ef59756544e not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.609559 4747 scope.go:117] "RemoveContainer" containerID="a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.609869 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5"} err="failed to get container status \"a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\": rpc error: code = NotFound desc = could not find 
container \"a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5\": container with ID starting with a69ef9a589b11e514aa9dc6cb41522bc0990b790d2234df23cfc4debebdee1b5 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.609949 4747 scope.go:117] "RemoveContainer" containerID="1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.610376 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4"} err="failed to get container status \"1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\": rpc error: code = NotFound desc = could not find container \"1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4\": container with ID starting with 1700be57a4c076771aa107e2ca25571e2a0b80c9b7eeca467eebaafe83871de4 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.610399 4747 scope.go:117] "RemoveContainer" containerID="a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.610669 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537"} err="failed to get container status \"a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\": rpc error: code = NotFound desc = could not find container \"a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537\": container with ID starting with a60ce41d87537e2b7e0368fff72c61e0ee079c0533c8c777c137759bb4093537 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.610790 4747 scope.go:117] "RemoveContainer" containerID="095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.611093 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d"} err="failed to get container status \"095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\": rpc error: code = NotFound desc = could not find container \"095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d\": container with ID starting with 095ee4a06bacbe59ac0eb5a28fe52df1f2a83b0797ed4a9e31071c370ff0a31d not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.611115 4747 scope.go:117] "RemoveContainer" containerID="101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.611354 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4"} err="failed to get container status \"101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\": rpc error: code = NotFound desc = could not find container \"101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4\": container with ID starting with 101857f46c20c5dc3c7ac3877959e9c117f6f96187c87f01b6190911d439a8a4 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.611454 4747 scope.go:117] "RemoveContainer" containerID="707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.611871 4747 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d"} err="failed to get container status \"707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\": rpc error: code = NotFound desc = could not find container \"707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d\": container with ID starting with 707adf4b7c39f40f19f458e4193eff4f7bbf738e6d98788091dc284d792d516d not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.611901 4747 scope.go:117] "RemoveContainer" containerID="802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.612327 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0"} err="failed to get container status \"802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0\": rpc error: code = NotFound desc = could not find container \"802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0\": container with ID starting with 802ee9bfb2d0faffe02dd6124ade123c867866caf9e34fa7c1050eb7832d3ec0 not found: ID does not exist" Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.649928 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:37 crc kubenswrapper[4747]: W1001 06:26:37.681352 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode9109090_8aaa_43bd_8510_ede77da3f047.slice/crio-bf10c1b4f9c07460bed8bacc4f708990ec1ca90ddb58ab8d82f7b0359b070530 WatchSource:0}: Error finding container bf10c1b4f9c07460bed8bacc4f708990ec1ca90ddb58ab8d82f7b0359b070530: Status 404 returned error can't find the container with id bf10c1b4f9c07460bed8bacc4f708990ec1ca90ddb58ab8d82f7b0359b070530 Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.788484 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p48hw"] Oct 01 06:26:37 crc kubenswrapper[4747]: I1001 06:26:37.797536 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-p48hw"] Oct 01 06:26:38 crc kubenswrapper[4747]: I1001 06:26:38.456919 4747 generic.go:334] "Generic (PLEG): container finished" podID="e9109090-8aaa-43bd-8510-ede77da3f047" containerID="f999825d3be511bfeee004207ba7ab5eb011778f8fa5172b9a296b27685a763a" exitCode=0 Oct 01 06:26:38 crc kubenswrapper[4747]: I1001 06:26:38.457033 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" event={"ID":"e9109090-8aaa-43bd-8510-ede77da3f047","Type":"ContainerDied","Data":"f999825d3be511bfeee004207ba7ab5eb011778f8fa5172b9a296b27685a763a"} Oct 01 06:26:38 crc kubenswrapper[4747]: I1001 06:26:38.457104 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" event={"ID":"e9109090-8aaa-43bd-8510-ede77da3f047","Type":"ContainerStarted","Data":"bf10c1b4f9c07460bed8bacc4f708990ec1ca90ddb58ab8d82f7b0359b070530"} Oct 01 06:26:39 crc kubenswrapper[4747]: I1001 06:26:39.297887 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38849139-d385-42a4-adab-687566065973" path="/var/lib/kubelet/pods/38849139-d385-42a4-adab-687566065973/volumes" Oct 01 06:26:39 crc kubenswrapper[4747]: I1001 
06:26:39.461880 4747 scope.go:117] "RemoveContainer" containerID="9ddd1dfd830b83669b9797e0821bee7b4e6f0f46b87b6e6c315085b8a0bcbea5" Oct 01 06:26:39 crc kubenswrapper[4747]: I1001 06:26:39.473004 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" event={"ID":"e9109090-8aaa-43bd-8510-ede77da3f047","Type":"ContainerStarted","Data":"fe7eeccbc62d6325a51b6ca7a8f179783e0108c129548d57bc7603caa5c14725"} Oct 01 06:26:39 crc kubenswrapper[4747]: I1001 06:26:39.473091 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" event={"ID":"e9109090-8aaa-43bd-8510-ede77da3f047","Type":"ContainerStarted","Data":"4562f4b015e565112f7c641ec0814c536c3b7d4abc91825c86eeb1ff39ca475f"} Oct 01 06:26:39 crc kubenswrapper[4747]: I1001 06:26:39.473135 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" event={"ID":"e9109090-8aaa-43bd-8510-ede77da3f047","Type":"ContainerStarted","Data":"bf681d20a785e024c8e956af6eb4faba645fd0097726fd4d7b0bfffaf0208756"} Oct 01 06:26:39 crc kubenswrapper[4747]: I1001 06:26:39.473154 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" event={"ID":"e9109090-8aaa-43bd-8510-ede77da3f047","Type":"ContainerStarted","Data":"fec9feac50d3a1b1c09d42fdba2d4f7b84d98fe6142f185feb16a1192b486cfa"} Oct 01 06:26:39 crc kubenswrapper[4747]: I1001 06:26:39.473168 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" event={"ID":"e9109090-8aaa-43bd-8510-ede77da3f047","Type":"ContainerStarted","Data":"1979be7165bf4a5b9e03c4f838b81c2b277ae4f25eec5097d8c9c44ed1b666bb"} Oct 01 06:26:39 crc kubenswrapper[4747]: I1001 06:26:39.473182 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" event={"ID":"e9109090-8aaa-43bd-8510-ede77da3f047","Type":"ContainerStarted","Data":"19322f2c153acca56d1be1eec54fd6632b5ce4f878efd83b72c760f1b76d11c6"} Oct 01 06:26:40 crc kubenswrapper[4747]: I1001 06:26:40.484983 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pvmkj_e2f2b780-a19d-4581-92f4-ca25c69a263c/kube-multus/2.log" Oct 01 06:26:42 crc kubenswrapper[4747]: I1001 06:26:42.504490 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" event={"ID":"e9109090-8aaa-43bd-8510-ede77da3f047","Type":"ContainerStarted","Data":"382125dc51b249e7208c0306d4063c01dbd728e47af09542726a76c7c8390750"} Oct 01 06:26:44 crc kubenswrapper[4747]: I1001 06:26:44.521162 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" event={"ID":"e9109090-8aaa-43bd-8510-ede77da3f047","Type":"ContainerStarted","Data":"e2118031f26549bf857b02dbd29287dfa2ca7c6cb2da72c7e98416946f2e4dd0"} Oct 01 06:26:44 crc kubenswrapper[4747]: I1001 06:26:44.521517 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:44 crc kubenswrapper[4747]: I1001 06:26:44.521533 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:44 crc kubenswrapper[4747]: I1001 06:26:44.555814 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:44 crc kubenswrapper[4747]: I1001 06:26:44.573237 4747 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" podStartSLOduration=7.573219429 podStartE2EDuration="7.573219429s" podCreationTimestamp="2025-10-01 06:26:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:26:44.569726132 +0000 UTC m=+605.979383201" watchObservedRunningTime="2025-10-01 06:26:44.573219429 +0000 UTC m=+605.982876488" Oct 01 06:26:45 crc kubenswrapper[4747]: I1001 06:26:45.528035 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:45 crc kubenswrapper[4747]: I1001 06:26:45.569622 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:26:52 crc kubenswrapper[4747]: I1001 06:26:52.276803 4747 scope.go:117] "RemoveContainer" containerID="a3b36be96e97a5e64fec3b1e8acc8ceb3e2ab46403b34da703e1f97a3e702b3d" Oct 01 06:26:52 crc kubenswrapper[4747]: E1001 06:26:52.277710 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-pvmkj_openshift-multus(e2f2b780-a19d-4581-92f4-ca25c69a263c)\"" pod="openshift-multus/multus-pvmkj" podUID="e2f2b780-a19d-4581-92f4-ca25c69a263c" Oct 01 06:27:03 crc kubenswrapper[4747]: I1001 06:27:03.565622 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw"] Oct 01 06:27:03 crc kubenswrapper[4747]: I1001 06:27:03.567838 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" Oct 01 06:27:03 crc kubenswrapper[4747]: I1001 06:27:03.570331 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 01 06:27:03 crc kubenswrapper[4747]: I1001 06:27:03.588807 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw"] Oct 01 06:27:03 crc kubenswrapper[4747]: I1001 06:27:03.628930 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cs5d\" (UniqueName: \"kubernetes.io/projected/68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1-kube-api-access-6cs5d\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw\" (UID: \"68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" Oct 01 06:27:03 crc kubenswrapper[4747]: I1001 06:27:03.629012 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw\" (UID: \"68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" Oct 01 06:27:03 crc kubenswrapper[4747]: I1001 06:27:03.629219 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw\" (UID: \"68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" Oct 01 06:27:03 crc kubenswrapper[4747]: I1001 06:27:03.730562 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cs5d\" (UniqueName: \"kubernetes.io/projected/68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1-kube-api-access-6cs5d\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw\" (UID: \"68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" Oct 01 06:27:03 crc kubenswrapper[4747]: I1001 06:27:03.731059 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw\" (UID: \"68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" Oct 01 06:27:03 crc kubenswrapper[4747]: I1001 06:27:03.731149 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw\" (UID: \"68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" Oct 01 06:27:03 crc kubenswrapper[4747]: I1001 06:27:03.731962 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1-util\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw\" (UID: \"68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" Oct 01 06:27:03 crc kubenswrapper[4747]: I1001 06:27:03.732044 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1-bundle\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw\" (UID: \"68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" Oct 01 06:27:03 crc kubenswrapper[4747]: I1001 06:27:03.763540 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cs5d\" (UniqueName: \"kubernetes.io/projected/68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1-kube-api-access-6cs5d\") pod \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw\" (UID: \"68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1\") " pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" Oct 01 06:27:03 crc kubenswrapper[4747]: I1001 06:27:03.887602 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" Oct 01 06:27:03 crc kubenswrapper[4747]: E1001 06:27:03.937240 4747 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw_openshift-marketplace_68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1_0(dfe602e89227244583c77e151435eada098d35533f3229f7d1e8a64d6569801e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 01 06:27:03 crc kubenswrapper[4747]: E1001 06:27:03.937383 4747 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw_openshift-marketplace_68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1_0(dfe602e89227244583c77e151435eada098d35533f3229f7d1e8a64d6569801e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" Oct 01 06:27:03 crc kubenswrapper[4747]: E1001 06:27:03.937436 4747 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw_openshift-marketplace_68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1_0(dfe602e89227244583c77e151435eada098d35533f3229f7d1e8a64d6569801e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" Oct 01 06:27:03 crc kubenswrapper[4747]: E1001 06:27:03.937543 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw_openshift-marketplace(68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw_openshift-marketplace(68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw_openshift-marketplace_68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1_0(dfe602e89227244583c77e151435eada098d35533f3229f7d1e8a64d6569801e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" podUID="68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1" Oct 01 06:27:04 crc kubenswrapper[4747]: I1001 06:27:04.277138 4747 scope.go:117] "RemoveContainer" containerID="a3b36be96e97a5e64fec3b1e8acc8ceb3e2ab46403b34da703e1f97a3e702b3d" Oct 01 06:27:04 crc kubenswrapper[4747]: I1001 06:27:04.653622 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pvmkj_e2f2b780-a19d-4581-92f4-ca25c69a263c/kube-multus/2.log" Oct 01 06:27:04 crc kubenswrapper[4747]: I1001 06:27:04.654163 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" Oct 01 06:27:04 crc kubenswrapper[4747]: I1001 06:27:04.654846 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" Oct 01 06:27:04 crc kubenswrapper[4747]: I1001 06:27:04.655048 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pvmkj" event={"ID":"e2f2b780-a19d-4581-92f4-ca25c69a263c","Type":"ContainerStarted","Data":"1fdf1b0ede93084b083141327371c140d18ce27cff5cbfd238e01f44eaef05bd"} Oct 01 06:27:04 crc kubenswrapper[4747]: E1001 06:27:04.705652 4747 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw_openshift-marketplace_68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1_0(07f494724b663b24f63f68d6249eaed5a54abb2a3c855e8077a7582f09960aa5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 01 06:27:04 crc kubenswrapper[4747]: E1001 06:27:04.705813 4747 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw_openshift-marketplace_68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1_0(07f494724b663b24f63f68d6249eaed5a54abb2a3c855e8077a7582f09960aa5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" Oct 01 06:27:04 crc kubenswrapper[4747]: E1001 06:27:04.705864 4747 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw_openshift-marketplace_68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1_0(07f494724b663b24f63f68d6249eaed5a54abb2a3c855e8077a7582f09960aa5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" Oct 01 06:27:04 crc kubenswrapper[4747]: E1001 06:27:04.705950 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw_openshift-marketplace(68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw_openshift-marketplace(68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw_openshift-marketplace_68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1_0(07f494724b663b24f63f68d6249eaed5a54abb2a3c855e8077a7582f09960aa5): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" podUID="68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1" Oct 01 06:27:05 crc kubenswrapper[4747]: I1001 06:27:05.761168 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:27:05 crc kubenswrapper[4747]: I1001 06:27:05.761291 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:27:05 crc kubenswrapper[4747]: I1001 06:27:05.761373 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:27:05 crc kubenswrapper[4747]: I1001 06:27:05.762241 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fb86108ac3eb45a3fbb1aa165dbc43fde8305d3de0de6e143c3c6aadae17a056"} pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 06:27:05 crc kubenswrapper[4747]: I1001 06:27:05.762357 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" containerID="cri-o://fb86108ac3eb45a3fbb1aa165dbc43fde8305d3de0de6e143c3c6aadae17a056" gracePeriod=600 Oct 01 06:27:06 crc kubenswrapper[4747]: I1001 06:27:06.673198 4747 generic.go:334] "Generic (PLEG): container 
finished" podID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerID="fb86108ac3eb45a3fbb1aa165dbc43fde8305d3de0de6e143c3c6aadae17a056" exitCode=0 Oct 01 06:27:06 crc kubenswrapper[4747]: I1001 06:27:06.673304 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" event={"ID":"90df9e29-7482-4ab7-84c6-f3029df17a0d","Type":"ContainerDied","Data":"fb86108ac3eb45a3fbb1aa165dbc43fde8305d3de0de6e143c3c6aadae17a056"} Oct 01 06:27:06 crc kubenswrapper[4747]: I1001 06:27:06.673578 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" event={"ID":"90df9e29-7482-4ab7-84c6-f3029df17a0d","Type":"ContainerStarted","Data":"779a7206e770a3d3eff5fabc4a08045c36917bf566f468ea4269d33bee1db67f"} Oct 01 06:27:06 crc kubenswrapper[4747]: I1001 06:27:06.673615 4747 scope.go:117] "RemoveContainer" containerID="b376cd219b804446b36aa80999cb1cd10015c3499940e5191b5df36c2cb92f37" Oct 01 06:27:07 crc kubenswrapper[4747]: I1001 06:27:07.695592 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hdnnp" Oct 01 06:27:17 crc kubenswrapper[4747]: I1001 06:27:17.276350 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" Oct 01 06:27:17 crc kubenswrapper[4747]: I1001 06:27:17.277684 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" Oct 01 06:27:17 crc kubenswrapper[4747]: I1001 06:27:17.535225 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw"] Oct 01 06:27:17 crc kubenswrapper[4747]: I1001 06:27:17.766806 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" event={"ID":"68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1","Type":"ContainerStarted","Data":"771630038adee564ea1bf6645ed3f52ed3daf1b82c14d05a3f93179b9200b0ae"} Oct 01 06:27:17 crc kubenswrapper[4747]: I1001 06:27:17.767282 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" event={"ID":"68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1","Type":"ContainerStarted","Data":"82a2669628ff63b0831268fc6cce3bf01ce1b92a639c89ebf26f4615ba2523e3"} Oct 01 06:27:18 crc kubenswrapper[4747]: I1001 06:27:18.777589 4747 generic.go:334] "Generic (PLEG): container finished" podID="68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1" containerID="771630038adee564ea1bf6645ed3f52ed3daf1b82c14d05a3f93179b9200b0ae" exitCode=0 Oct 01 06:27:18 crc kubenswrapper[4747]: I1001 06:27:18.777654 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" event={"ID":"68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1","Type":"ContainerDied","Data":"771630038adee564ea1bf6645ed3f52ed3daf1b82c14d05a3f93179b9200b0ae"} Oct 01 06:27:18 crc kubenswrapper[4747]: I1001 06:27:18.781350 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 06:27:20 crc kubenswrapper[4747]: I1001 06:27:20.793982 4747 generic.go:334] "Generic (PLEG): container finished" podID="68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1" 
containerID="02029fdd2d55ef08280cb97ba862087c316b37251293262c712d63ccdd307a9a" exitCode=0 Oct 01 06:27:20 crc kubenswrapper[4747]: I1001 06:27:20.794067 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" event={"ID":"68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1","Type":"ContainerDied","Data":"02029fdd2d55ef08280cb97ba862087c316b37251293262c712d63ccdd307a9a"} Oct 01 06:27:21 crc kubenswrapper[4747]: I1001 06:27:21.804686 4747 generic.go:334] "Generic (PLEG): container finished" podID="68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1" containerID="a6bc31e41a8f2a52f4b40506e8c59807cfd180889a558166193206cf77f171f4" exitCode=0 Oct 01 06:27:21 crc kubenswrapper[4747]: I1001 06:27:21.804841 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" event={"ID":"68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1","Type":"ContainerDied","Data":"a6bc31e41a8f2a52f4b40506e8c59807cfd180889a558166193206cf77f171f4"} Oct 01 06:27:23 crc kubenswrapper[4747]: I1001 06:27:23.127470 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" Oct 01 06:27:23 crc kubenswrapper[4747]: I1001 06:27:23.197803 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1-bundle\") pod \"68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1\" (UID: \"68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1\") " Oct 01 06:27:23 crc kubenswrapper[4747]: I1001 06:27:23.197876 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1-util\") pod \"68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1\" (UID: \"68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1\") " Oct 01 06:27:23 crc kubenswrapper[4747]: I1001 06:27:23.198115 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cs5d\" (UniqueName: \"kubernetes.io/projected/68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1-kube-api-access-6cs5d\") pod \"68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1\" (UID: \"68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1\") " Oct 01 06:27:23 crc kubenswrapper[4747]: I1001 06:27:23.200045 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1-bundle" (OuterVolumeSpecName: "bundle") pod "68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1" (UID: "68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:27:23 crc kubenswrapper[4747]: I1001 06:27:23.207516 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1-kube-api-access-6cs5d" (OuterVolumeSpecName: "kube-api-access-6cs5d") pod "68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1" (UID: "68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1"). InnerVolumeSpecName "kube-api-access-6cs5d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:27:23 crc kubenswrapper[4747]: I1001 06:27:23.220269 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1-util" (OuterVolumeSpecName: "util") pod "68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1" (UID: "68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1"). 
InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:27:23 crc kubenswrapper[4747]: I1001 06:27:23.299285 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cs5d\" (UniqueName: \"kubernetes.io/projected/68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1-kube-api-access-6cs5d\") on node \"crc\" DevicePath \"\"" Oct 01 06:27:23 crc kubenswrapper[4747]: I1001 06:27:23.299317 4747 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:27:23 crc kubenswrapper[4747]: I1001 06:27:23.299330 4747 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1-util\") on node \"crc\" DevicePath \"\"" Oct 01 06:27:23 crc kubenswrapper[4747]: I1001 06:27:23.824781 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" event={"ID":"68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1","Type":"ContainerDied","Data":"82a2669628ff63b0831268fc6cce3bf01ce1b92a639c89ebf26f4615ba2523e3"} Oct 01 06:27:23 crc kubenswrapper[4747]: I1001 06:27:23.824841 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="82a2669628ff63b0831268fc6cce3bf01ce1b92a639c89ebf26f4615ba2523e3" Oct 01 06:27:23 crc kubenswrapper[4747]: I1001 06:27:23.824864 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.233024 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-8fcb75648-xwqnj"] Oct 01 06:27:37 crc kubenswrapper[4747]: E1001 06:27:37.233597 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1" containerName="extract" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.233610 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1" containerName="extract" Oct 01 06:27:37 crc kubenswrapper[4747]: E1001 06:27:37.233630 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1" containerName="util" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.233635 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1" containerName="util" Oct 01 06:27:37 crc kubenswrapper[4747]: E1001 06:27:37.233647 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1" containerName="pull" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.233652 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1" containerName="pull" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.233737 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1" containerName="extract" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.234129 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-8fcb75648-xwqnj" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.236254 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.236259 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.251636 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.252686 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-9bdfv" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.252873 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.271340 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-8fcb75648-xwqnj"] Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.290729 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f14a8d6e-977f-4144-8a2a-d2d534a6f89b-apiservice-cert\") pod \"metallb-operator-controller-manager-8fcb75648-xwqnj\" (UID: \"f14a8d6e-977f-4144-8a2a-d2d534a6f89b\") " pod="metallb-system/metallb-operator-controller-manager-8fcb75648-xwqnj" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.290794 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f14a8d6e-977f-4144-8a2a-d2d534a6f89b-webhook-cert\") pod \"metallb-operator-controller-manager-8fcb75648-xwqnj\" (UID: \"f14a8d6e-977f-4144-8a2a-d2d534a6f89b\") " pod="metallb-system/metallb-operator-controller-manager-8fcb75648-xwqnj" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.290820 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8whz4\" (UniqueName: \"kubernetes.io/projected/f14a8d6e-977f-4144-8a2a-d2d534a6f89b-kube-api-access-8whz4\") pod \"metallb-operator-controller-manager-8fcb75648-xwqnj\" (UID: \"f14a8d6e-977f-4144-8a2a-d2d534a6f89b\") " pod="metallb-system/metallb-operator-controller-manager-8fcb75648-xwqnj" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.392161 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f14a8d6e-977f-4144-8a2a-d2d534a6f89b-apiservice-cert\") pod \"metallb-operator-controller-manager-8fcb75648-xwqnj\" (UID: \"f14a8d6e-977f-4144-8a2a-d2d534a6f89b\") " pod="metallb-system/metallb-operator-controller-manager-8fcb75648-xwqnj" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.392213 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f14a8d6e-977f-4144-8a2a-d2d534a6f89b-webhook-cert\") pod \"metallb-operator-controller-manager-8fcb75648-xwqnj\" (UID: \"f14a8d6e-977f-4144-8a2a-d2d534a6f89b\") " pod="metallb-system/metallb-operator-controller-manager-8fcb75648-xwqnj" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.392235 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-8whz4\" (UniqueName: \"kubernetes.io/projected/f14a8d6e-977f-4144-8a2a-d2d534a6f89b-kube-api-access-8whz4\") pod \"metallb-operator-controller-manager-8fcb75648-xwqnj\" (UID: \"f14a8d6e-977f-4144-8a2a-d2d534a6f89b\") " pod="metallb-system/metallb-operator-controller-manager-8fcb75648-xwqnj" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.397908 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f14a8d6e-977f-4144-8a2a-d2d534a6f89b-webhook-cert\") pod \"metallb-operator-controller-manager-8fcb75648-xwqnj\" (UID: \"f14a8d6e-977f-4144-8a2a-d2d534a6f89b\") " pod="metallb-system/metallb-operator-controller-manager-8fcb75648-xwqnj" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.398039 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f14a8d6e-977f-4144-8a2a-d2d534a6f89b-apiservice-cert\") pod \"metallb-operator-controller-manager-8fcb75648-xwqnj\" (UID: \"f14a8d6e-977f-4144-8a2a-d2d534a6f89b\") " pod="metallb-system/metallb-operator-controller-manager-8fcb75648-xwqnj" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.410612 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8whz4\" (UniqueName: \"kubernetes.io/projected/f14a8d6e-977f-4144-8a2a-d2d534a6f89b-kube-api-access-8whz4\") pod \"metallb-operator-controller-manager-8fcb75648-xwqnj\" (UID: \"f14a8d6e-977f-4144-8a2a-d2d534a6f89b\") " pod="metallb-system/metallb-operator-controller-manager-8fcb75648-xwqnj" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.555092 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-556dcd774d-2bh5z"] Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.556080 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-8fcb75648-xwqnj" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.556554 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-556dcd774d-2bh5z" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.561465 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.562057 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-cljqh" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.562170 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.582424 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-556dcd774d-2bh5z"] Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.602842 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b0bffa6f-ff4b-43d8-82f5-1be2e812e0a0-webhook-cert\") pod \"metallb-operator-webhook-server-556dcd774d-2bh5z\" (UID: \"b0bffa6f-ff4b-43d8-82f5-1be2e812e0a0\") " pod="metallb-system/metallb-operator-webhook-server-556dcd774d-2bh5z" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.602911 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5j6f7\" (UniqueName: \"kubernetes.io/projected/b0bffa6f-ff4b-43d8-82f5-1be2e812e0a0-kube-api-access-5j6f7\") pod \"metallb-operator-webhook-server-556dcd774d-2bh5z\" (UID: \"b0bffa6f-ff4b-43d8-82f5-1be2e812e0a0\") " pod="metallb-system/metallb-operator-webhook-server-556dcd774d-2bh5z" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.602957 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b0bffa6f-ff4b-43d8-82f5-1be2e812e0a0-apiservice-cert\") pod \"metallb-operator-webhook-server-556dcd774d-2bh5z\" (UID: \"b0bffa6f-ff4b-43d8-82f5-1be2e812e0a0\") " pod="metallb-system/metallb-operator-webhook-server-556dcd774d-2bh5z" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.705271 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b0bffa6f-ff4b-43d8-82f5-1be2e812e0a0-apiservice-cert\") pod \"metallb-operator-webhook-server-556dcd774d-2bh5z\" (UID: \"b0bffa6f-ff4b-43d8-82f5-1be2e812e0a0\") " pod="metallb-system/metallb-operator-webhook-server-556dcd774d-2bh5z" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.705316 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b0bffa6f-ff4b-43d8-82f5-1be2e812e0a0-webhook-cert\") pod \"metallb-operator-webhook-server-556dcd774d-2bh5z\" (UID: \"b0bffa6f-ff4b-43d8-82f5-1be2e812e0a0\") " pod="metallb-system/metallb-operator-webhook-server-556dcd774d-2bh5z" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.705365 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5j6f7\" (UniqueName: \"kubernetes.io/projected/b0bffa6f-ff4b-43d8-82f5-1be2e812e0a0-kube-api-access-5j6f7\") pod \"metallb-operator-webhook-server-556dcd774d-2bh5z\" (UID: \"b0bffa6f-ff4b-43d8-82f5-1be2e812e0a0\") " pod="metallb-system/metallb-operator-webhook-server-556dcd774d-2bh5z" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 
06:27:37.708872 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b0bffa6f-ff4b-43d8-82f5-1be2e812e0a0-apiservice-cert\") pod \"metallb-operator-webhook-server-556dcd774d-2bh5z\" (UID: \"b0bffa6f-ff4b-43d8-82f5-1be2e812e0a0\") " pod="metallb-system/metallb-operator-webhook-server-556dcd774d-2bh5z" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.709046 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b0bffa6f-ff4b-43d8-82f5-1be2e812e0a0-webhook-cert\") pod \"metallb-operator-webhook-server-556dcd774d-2bh5z\" (UID: \"b0bffa6f-ff4b-43d8-82f5-1be2e812e0a0\") " pod="metallb-system/metallb-operator-webhook-server-556dcd774d-2bh5z" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.721686 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5j6f7\" (UniqueName: \"kubernetes.io/projected/b0bffa6f-ff4b-43d8-82f5-1be2e812e0a0-kube-api-access-5j6f7\") pod \"metallb-operator-webhook-server-556dcd774d-2bh5z\" (UID: \"b0bffa6f-ff4b-43d8-82f5-1be2e812e0a0\") " pod="metallb-system/metallb-operator-webhook-server-556dcd774d-2bh5z" Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.774765 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-8fcb75648-xwqnj"] Oct 01 06:27:37 crc kubenswrapper[4747]: W1001 06:27:37.779866 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf14a8d6e_977f_4144_8a2a_d2d534a6f89b.slice/crio-8caa5cbeeba1a7771d1078adcc8630ac83a5d7a4bd5ec04785007b1a8ccb53c0 WatchSource:0}: Error finding container 8caa5cbeeba1a7771d1078adcc8630ac83a5d7a4bd5ec04785007b1a8ccb53c0: Status 404 returned error can't find the container with id 8caa5cbeeba1a7771d1078adcc8630ac83a5d7a4bd5ec04785007b1a8ccb53c0 Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.920521 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-8fcb75648-xwqnj" event={"ID":"f14a8d6e-977f-4144-8a2a-d2d534a6f89b","Type":"ContainerStarted","Data":"8caa5cbeeba1a7771d1078adcc8630ac83a5d7a4bd5ec04785007b1a8ccb53c0"} Oct 01 06:27:37 crc kubenswrapper[4747]: I1001 06:27:37.932904 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-556dcd774d-2bh5z" Oct 01 06:27:38 crc kubenswrapper[4747]: I1001 06:27:38.167093 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-556dcd774d-2bh5z"] Oct 01 06:27:38 crc kubenswrapper[4747]: W1001 06:27:38.171225 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb0bffa6f_ff4b_43d8_82f5_1be2e812e0a0.slice/crio-cd6d3a042ecd771ab4a1af1dba1a609621a378dfba248aba5e7292f16ab146a4 WatchSource:0}: Error finding container cd6d3a042ecd771ab4a1af1dba1a609621a378dfba248aba5e7292f16ab146a4: Status 404 returned error can't find the container with id cd6d3a042ecd771ab4a1af1dba1a609621a378dfba248aba5e7292f16ab146a4 Oct 01 06:27:38 crc kubenswrapper[4747]: I1001 06:27:38.928316 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-556dcd774d-2bh5z" event={"ID":"b0bffa6f-ff4b-43d8-82f5-1be2e812e0a0","Type":"ContainerStarted","Data":"cd6d3a042ecd771ab4a1af1dba1a609621a378dfba248aba5e7292f16ab146a4"} Oct 01 06:27:40 crc kubenswrapper[4747]: I1001 06:27:40.945374 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-8fcb75648-xwqnj" event={"ID":"f14a8d6e-977f-4144-8a2a-d2d534a6f89b","Type":"ContainerStarted","Data":"bd98f87437822eefaea4741994c94485dd27733f5d5a33779616f5e1f044191c"} Oct 01 06:27:40 crc kubenswrapper[4747]: I1001 06:27:40.945841 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-8fcb75648-xwqnj" Oct 01 06:27:40 crc kubenswrapper[4747]: I1001 06:27:40.970052 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-8fcb75648-xwqnj" podStartSLOduration=1.183739767 podStartE2EDuration="3.970038883s" podCreationTimestamp="2025-10-01 06:27:37 +0000 UTC" firstStartedPulling="2025-10-01 06:27:37.78442125 +0000 UTC m=+659.194078299" lastFinishedPulling="2025-10-01 06:27:40.570720366 +0000 UTC m=+661.980377415" observedRunningTime="2025-10-01 06:27:40.966590847 +0000 UTC m=+662.376247896" watchObservedRunningTime="2025-10-01 06:27:40.970038883 +0000 UTC m=+662.379695932" Oct 01 06:27:42 crc kubenswrapper[4747]: I1001 06:27:42.960198 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-556dcd774d-2bh5z" event={"ID":"b0bffa6f-ff4b-43d8-82f5-1be2e812e0a0","Type":"ContainerStarted","Data":"cd2064a2f8f3131061b71e204cca2e2af17025ebfff6fb95e096aec4f3137e10"} Oct 01 06:27:42 crc kubenswrapper[4747]: I1001 06:27:42.960569 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-556dcd774d-2bh5z" Oct 01 06:27:42 crc kubenswrapper[4747]: I1001 06:27:42.985770 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-556dcd774d-2bh5z" podStartSLOduration=1.8439779170000001 podStartE2EDuration="5.985736215s" podCreationTimestamp="2025-10-01 06:27:37 +0000 UTC" firstStartedPulling="2025-10-01 06:27:38.174533217 +0000 UTC m=+659.584190266" lastFinishedPulling="2025-10-01 06:27:42.316291475 +0000 UTC m=+663.725948564" observedRunningTime="2025-10-01 06:27:42.983712915 +0000 UTC m=+664.393369974" watchObservedRunningTime="2025-10-01 06:27:42.985736215 +0000 UTC 
m=+664.395393274" Oct 01 06:27:57 crc kubenswrapper[4747]: I1001 06:27:57.936935 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-556dcd774d-2bh5z" Oct 01 06:28:17 crc kubenswrapper[4747]: I1001 06:28:17.559444 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-8fcb75648-xwqnj" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.351687 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-mm45p"] Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.352471 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-mm45p" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.356250 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-9rgjv"] Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.356895 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-2lvnm" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.357495 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.359356 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.364527 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.364701 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.366171 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-mm45p"] Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.378343 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9bda04f7-665b-4bd0-9884-999b80fcb561-metrics-certs\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.378394 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qvqb\" (UniqueName: \"kubernetes.io/projected/9bda04f7-665b-4bd0-9884-999b80fcb561-kube-api-access-6qvqb\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.378424 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9bda04f7-665b-4bd0-9884-999b80fcb561-frr-sockets\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.378466 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0b60e795-ff1b-4dfc-b3cf-3b28b92ac293-cert\") pod \"frr-k8s-webhook-server-5478bdb765-mm45p\" (UID: \"0b60e795-ff1b-4dfc-b3cf-3b28b92ac293\") " 
pod="metallb-system/frr-k8s-webhook-server-5478bdb765-mm45p" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.378650 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9bda04f7-665b-4bd0-9884-999b80fcb561-reloader\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.378731 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2c6kk\" (UniqueName: \"kubernetes.io/projected/0b60e795-ff1b-4dfc-b3cf-3b28b92ac293-kube-api-access-2c6kk\") pod \"frr-k8s-webhook-server-5478bdb765-mm45p\" (UID: \"0b60e795-ff1b-4dfc-b3cf-3b28b92ac293\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-mm45p" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.378819 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9bda04f7-665b-4bd0-9884-999b80fcb561-metrics\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.378947 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9bda04f7-665b-4bd0-9884-999b80fcb561-frr-conf\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.378980 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9bda04f7-665b-4bd0-9884-999b80fcb561-frr-startup\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.446909 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-zw58l"] Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.447695 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-zw58l" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.449032 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.449042 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.449560 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-j2886" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.450005 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.464589 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-5d688f5ffc-v6qhr"] Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.465598 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5d688f5ffc-v6qhr" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.466877 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.480029 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9bda04f7-665b-4bd0-9884-999b80fcb561-metrics\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.480045 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-v6qhr"] Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.480070 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfhqv\" (UniqueName: \"kubernetes.io/projected/25e93d69-ecec-4c53-81e5-18cd341f14f3-kube-api-access-nfhqv\") pod \"speaker-zw58l\" (UID: \"25e93d69-ecec-4c53-81e5-18cd341f14f3\") " pod="metallb-system/speaker-zw58l" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.480099 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/25e93d69-ecec-4c53-81e5-18cd341f14f3-metrics-certs\") pod \"speaker-zw58l\" (UID: \"25e93d69-ecec-4c53-81e5-18cd341f14f3\") " pod="metallb-system/speaker-zw58l" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.480131 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/25e93d69-ecec-4c53-81e5-18cd341f14f3-metallb-excludel2\") pod \"speaker-zw58l\" (UID: \"25e93d69-ecec-4c53-81e5-18cd341f14f3\") " pod="metallb-system/speaker-zw58l" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.480167 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgm8n\" (UniqueName: \"kubernetes.io/projected/ad53abf4-c21f-4dcc-9761-aed314fca36c-kube-api-access-wgm8n\") pod \"controller-5d688f5ffc-v6qhr\" (UID: \"ad53abf4-c21f-4dcc-9761-aed314fca36c\") " pod="metallb-system/controller-5d688f5ffc-v6qhr" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.480187 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ad53abf4-c21f-4dcc-9761-aed314fca36c-cert\") pod \"controller-5d688f5ffc-v6qhr\" (UID: \"ad53abf4-c21f-4dcc-9761-aed314fca36c\") " pod="metallb-system/controller-5d688f5ffc-v6qhr" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.480205 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9bda04f7-665b-4bd0-9884-999b80fcb561-frr-conf\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.480219 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9bda04f7-665b-4bd0-9884-999b80fcb561-frr-startup\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.480239 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/25e93d69-ecec-4c53-81e5-18cd341f14f3-memberlist\") pod \"speaker-zw58l\" (UID: \"25e93d69-ecec-4c53-81e5-18cd341f14f3\") " pod="metallb-system/speaker-zw58l" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.480264 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9bda04f7-665b-4bd0-9884-999b80fcb561-metrics-certs\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.480280 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qvqb\" (UniqueName: \"kubernetes.io/projected/9bda04f7-665b-4bd0-9884-999b80fcb561-kube-api-access-6qvqb\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.480306 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9bda04f7-665b-4bd0-9884-999b80fcb561-frr-sockets\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.480323 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ad53abf4-c21f-4dcc-9761-aed314fca36c-metrics-certs\") pod \"controller-5d688f5ffc-v6qhr\" (UID: \"ad53abf4-c21f-4dcc-9761-aed314fca36c\") " pod="metallb-system/controller-5d688f5ffc-v6qhr" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.480345 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0b60e795-ff1b-4dfc-b3cf-3b28b92ac293-cert\") pod \"frr-k8s-webhook-server-5478bdb765-mm45p\" (UID: \"0b60e795-ff1b-4dfc-b3cf-3b28b92ac293\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-mm45p" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.480366 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9bda04f7-665b-4bd0-9884-999b80fcb561-reloader\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.480381 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2c6kk\" (UniqueName: \"kubernetes.io/projected/0b60e795-ff1b-4dfc-b3cf-3b28b92ac293-kube-api-access-2c6kk\") pod \"frr-k8s-webhook-server-5478bdb765-mm45p\" (UID: \"0b60e795-ff1b-4dfc-b3cf-3b28b92ac293\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-mm45p" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.480845 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9bda04f7-665b-4bd0-9884-999b80fcb561-metrics\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: E1001 06:28:18.480895 4747 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Oct 01 06:28:18 crc kubenswrapper[4747]: E1001 06:28:18.480974 4747 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9bda04f7-665b-4bd0-9884-999b80fcb561-metrics-certs podName:9bda04f7-665b-4bd0-9884-999b80fcb561 nodeName:}" failed. No retries permitted until 2025-10-01 06:28:18.980941742 +0000 UTC m=+700.390598791 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9bda04f7-665b-4bd0-9884-999b80fcb561-metrics-certs") pod "frr-k8s-9rgjv" (UID: "9bda04f7-665b-4bd0-9884-999b80fcb561") : secret "frr-k8s-certs-secret" not found Oct 01 06:28:18 crc kubenswrapper[4747]: E1001 06:28:18.480977 4747 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Oct 01 06:28:18 crc kubenswrapper[4747]: E1001 06:28:18.481058 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0b60e795-ff1b-4dfc-b3cf-3b28b92ac293-cert podName:0b60e795-ff1b-4dfc-b3cf-3b28b92ac293 nodeName:}" failed. No retries permitted until 2025-10-01 06:28:18.981035996 +0000 UTC m=+700.390693045 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/0b60e795-ff1b-4dfc-b3cf-3b28b92ac293-cert") pod "frr-k8s-webhook-server-5478bdb765-mm45p" (UID: "0b60e795-ff1b-4dfc-b3cf-3b28b92ac293") : secret "frr-k8s-webhook-server-cert" not found Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.481148 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9bda04f7-665b-4bd0-9884-999b80fcb561-frr-conf\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.481230 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9bda04f7-665b-4bd0-9884-999b80fcb561-reloader\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.481290 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9bda04f7-665b-4bd0-9884-999b80fcb561-frr-sockets\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.481816 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9bda04f7-665b-4bd0-9884-999b80fcb561-frr-startup\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.502132 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2c6kk\" (UniqueName: \"kubernetes.io/projected/0b60e795-ff1b-4dfc-b3cf-3b28b92ac293-kube-api-access-2c6kk\") pod \"frr-k8s-webhook-server-5478bdb765-mm45p\" (UID: \"0b60e795-ff1b-4dfc-b3cf-3b28b92ac293\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-mm45p" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.502206 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qvqb\" (UniqueName: \"kubernetes.io/projected/9bda04f7-665b-4bd0-9884-999b80fcb561-kube-api-access-6qvqb\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " 
pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.581960 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfhqv\" (UniqueName: \"kubernetes.io/projected/25e93d69-ecec-4c53-81e5-18cd341f14f3-kube-api-access-nfhqv\") pod \"speaker-zw58l\" (UID: \"25e93d69-ecec-4c53-81e5-18cd341f14f3\") " pod="metallb-system/speaker-zw58l" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.581999 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/25e93d69-ecec-4c53-81e5-18cd341f14f3-metrics-certs\") pod \"speaker-zw58l\" (UID: \"25e93d69-ecec-4c53-81e5-18cd341f14f3\") " pod="metallb-system/speaker-zw58l" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.582021 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/25e93d69-ecec-4c53-81e5-18cd341f14f3-metallb-excludel2\") pod \"speaker-zw58l\" (UID: \"25e93d69-ecec-4c53-81e5-18cd341f14f3\") " pod="metallb-system/speaker-zw58l" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.582058 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgm8n\" (UniqueName: \"kubernetes.io/projected/ad53abf4-c21f-4dcc-9761-aed314fca36c-kube-api-access-wgm8n\") pod \"controller-5d688f5ffc-v6qhr\" (UID: \"ad53abf4-c21f-4dcc-9761-aed314fca36c\") " pod="metallb-system/controller-5d688f5ffc-v6qhr" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.582086 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ad53abf4-c21f-4dcc-9761-aed314fca36c-cert\") pod \"controller-5d688f5ffc-v6qhr\" (UID: \"ad53abf4-c21f-4dcc-9761-aed314fca36c\") " pod="metallb-system/controller-5d688f5ffc-v6qhr" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.582125 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/25e93d69-ecec-4c53-81e5-18cd341f14f3-memberlist\") pod \"speaker-zw58l\" (UID: \"25e93d69-ecec-4c53-81e5-18cd341f14f3\") " pod="metallb-system/speaker-zw58l" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.582172 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ad53abf4-c21f-4dcc-9761-aed314fca36c-metrics-certs\") pod \"controller-5d688f5ffc-v6qhr\" (UID: \"ad53abf4-c21f-4dcc-9761-aed314fca36c\") " pod="metallb-system/controller-5d688f5ffc-v6qhr" Oct 01 06:28:18 crc kubenswrapper[4747]: E1001 06:28:18.582742 4747 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 01 06:28:18 crc kubenswrapper[4747]: E1001 06:28:18.582857 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/25e93d69-ecec-4c53-81e5-18cd341f14f3-memberlist podName:25e93d69-ecec-4c53-81e5-18cd341f14f3 nodeName:}" failed. No retries permitted until 2025-10-01 06:28:19.082816445 +0000 UTC m=+700.492473564 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/25e93d69-ecec-4c53-81e5-18cd341f14f3-memberlist") pod "speaker-zw58l" (UID: "25e93d69-ecec-4c53-81e5-18cd341f14f3") : secret "metallb-memberlist" not found Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.583737 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/25e93d69-ecec-4c53-81e5-18cd341f14f3-metallb-excludel2\") pod \"speaker-zw58l\" (UID: \"25e93d69-ecec-4c53-81e5-18cd341f14f3\") " pod="metallb-system/speaker-zw58l" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.585367 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/25e93d69-ecec-4c53-81e5-18cd341f14f3-metrics-certs\") pod \"speaker-zw58l\" (UID: \"25e93d69-ecec-4c53-81e5-18cd341f14f3\") " pod="metallb-system/speaker-zw58l" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.586128 4747 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.591336 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ad53abf4-c21f-4dcc-9761-aed314fca36c-metrics-certs\") pod \"controller-5d688f5ffc-v6qhr\" (UID: \"ad53abf4-c21f-4dcc-9761-aed314fca36c\") " pod="metallb-system/controller-5d688f5ffc-v6qhr" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.595460 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ad53abf4-c21f-4dcc-9761-aed314fca36c-cert\") pod \"controller-5d688f5ffc-v6qhr\" (UID: \"ad53abf4-c21f-4dcc-9761-aed314fca36c\") " pod="metallb-system/controller-5d688f5ffc-v6qhr" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.599441 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgm8n\" (UniqueName: \"kubernetes.io/projected/ad53abf4-c21f-4dcc-9761-aed314fca36c-kube-api-access-wgm8n\") pod \"controller-5d688f5ffc-v6qhr\" (UID: \"ad53abf4-c21f-4dcc-9761-aed314fca36c\") " pod="metallb-system/controller-5d688f5ffc-v6qhr" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.604259 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfhqv\" (UniqueName: \"kubernetes.io/projected/25e93d69-ecec-4c53-81e5-18cd341f14f3-kube-api-access-nfhqv\") pod \"speaker-zw58l\" (UID: \"25e93d69-ecec-4c53-81e5-18cd341f14f3\") " pod="metallb-system/speaker-zw58l" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.778867 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-5d688f5ffc-v6qhr" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.988054 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0b60e795-ff1b-4dfc-b3cf-3b28b92ac293-cert\") pod \"frr-k8s-webhook-server-5478bdb765-mm45p\" (UID: \"0b60e795-ff1b-4dfc-b3cf-3b28b92ac293\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-mm45p" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.988712 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9bda04f7-665b-4bd0-9884-999b80fcb561-metrics-certs\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.992170 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/0b60e795-ff1b-4dfc-b3cf-3b28b92ac293-cert\") pod \"frr-k8s-webhook-server-5478bdb765-mm45p\" (UID: \"0b60e795-ff1b-4dfc-b3cf-3b28b92ac293\") " pod="metallb-system/frr-k8s-webhook-server-5478bdb765-mm45p" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.994265 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9bda04f7-665b-4bd0-9884-999b80fcb561-metrics-certs\") pod \"frr-k8s-9rgjv\" (UID: \"9bda04f7-665b-4bd0-9884-999b80fcb561\") " pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:18 crc kubenswrapper[4747]: I1001 06:28:18.998257 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-5d688f5ffc-v6qhr"] Oct 01 06:28:19 crc kubenswrapper[4747]: I1001 06:28:19.089647 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/25e93d69-ecec-4c53-81e5-18cd341f14f3-memberlist\") pod \"speaker-zw58l\" (UID: \"25e93d69-ecec-4c53-81e5-18cd341f14f3\") " pod="metallb-system/speaker-zw58l" Oct 01 06:28:19 crc kubenswrapper[4747]: E1001 06:28:19.089825 4747 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 01 06:28:19 crc kubenswrapper[4747]: E1001 06:28:19.089881 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/25e93d69-ecec-4c53-81e5-18cd341f14f3-memberlist podName:25e93d69-ecec-4c53-81e5-18cd341f14f3 nodeName:}" failed. No retries permitted until 2025-10-01 06:28:20.089869058 +0000 UTC m=+701.499526107 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/25e93d69-ecec-4c53-81e5-18cd341f14f3-memberlist") pod "speaker-zw58l" (UID: "25e93d69-ecec-4c53-81e5-18cd341f14f3") : secret "metallb-memberlist" not found Oct 01 06:28:19 crc kubenswrapper[4747]: I1001 06:28:19.195381 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-v6qhr" event={"ID":"ad53abf4-c21f-4dcc-9761-aed314fca36c","Type":"ContainerStarted","Data":"e7b0aad81da9aca2d807b8acdb254eae7674b9a7f46888ed4613635da6e6c7b9"} Oct 01 06:28:19 crc kubenswrapper[4747]: I1001 06:28:19.195431 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-v6qhr" event={"ID":"ad53abf4-c21f-4dcc-9761-aed314fca36c","Type":"ContainerStarted","Data":"07495cd385d75137a13d0dbf39c5c68376b9370d563dc923d193d93b5a386ea4"} Oct 01 06:28:19 crc kubenswrapper[4747]: I1001 06:28:19.274307 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-mm45p" Oct 01 06:28:19 crc kubenswrapper[4747]: I1001 06:28:19.281469 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:19 crc kubenswrapper[4747]: I1001 06:28:19.529268 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-5478bdb765-mm45p"] Oct 01 06:28:19 crc kubenswrapper[4747]: W1001 06:28:19.535953 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b60e795_ff1b_4dfc_b3cf_3b28b92ac293.slice/crio-23abc2456da3703de6fdd5bcd2b643b5904d4b80dafd0a0e4207a221bf06218e WatchSource:0}: Error finding container 23abc2456da3703de6fdd5bcd2b643b5904d4b80dafd0a0e4207a221bf06218e: Status 404 returned error can't find the container with id 23abc2456da3703de6fdd5bcd2b643b5904d4b80dafd0a0e4207a221bf06218e Oct 01 06:28:20 crc kubenswrapper[4747]: I1001 06:28:20.100405 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/25e93d69-ecec-4c53-81e5-18cd341f14f3-memberlist\") pod \"speaker-zw58l\" (UID: \"25e93d69-ecec-4c53-81e5-18cd341f14f3\") " pod="metallb-system/speaker-zw58l" Oct 01 06:28:20 crc kubenswrapper[4747]: I1001 06:28:20.111948 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/25e93d69-ecec-4c53-81e5-18cd341f14f3-memberlist\") pod \"speaker-zw58l\" (UID: \"25e93d69-ecec-4c53-81e5-18cd341f14f3\") " pod="metallb-system/speaker-zw58l" Oct 01 06:28:20 crc kubenswrapper[4747]: I1001 06:28:20.201102 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-9rgjv" event={"ID":"9bda04f7-665b-4bd0-9884-999b80fcb561","Type":"ContainerStarted","Data":"cdec7218e553ba8303d6d76e7ba1dafd8a796f0077cf6a01bca855f944cb643c"} Oct 01 06:28:20 crc kubenswrapper[4747]: I1001 06:28:20.202276 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-mm45p" event={"ID":"0b60e795-ff1b-4dfc-b3cf-3b28b92ac293","Type":"ContainerStarted","Data":"23abc2456da3703de6fdd5bcd2b643b5904d4b80dafd0a0e4207a221bf06218e"} Oct 01 06:28:20 crc kubenswrapper[4747]: I1001 06:28:20.259988 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-zw58l" Oct 01 06:28:20 crc kubenswrapper[4747]: W1001 06:28:20.282620 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod25e93d69_ecec_4c53_81e5_18cd341f14f3.slice/crio-f9d25954d15b0af3f69d8b27ff5c60ec10445b3c1317461ddb41ed851ce1a0eb WatchSource:0}: Error finding container f9d25954d15b0af3f69d8b27ff5c60ec10445b3c1317461ddb41ed851ce1a0eb: Status 404 returned error can't find the container with id f9d25954d15b0af3f69d8b27ff5c60ec10445b3c1317461ddb41ed851ce1a0eb Oct 01 06:28:21 crc kubenswrapper[4747]: I1001 06:28:21.210187 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-zw58l" event={"ID":"25e93d69-ecec-4c53-81e5-18cd341f14f3","Type":"ContainerStarted","Data":"01270d410611b5281a29f63b538de0322d63fb8b6e861eacf5d335e826abb9c8"} Oct 01 06:28:21 crc kubenswrapper[4747]: I1001 06:28:21.210466 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-zw58l" event={"ID":"25e93d69-ecec-4c53-81e5-18cd341f14f3","Type":"ContainerStarted","Data":"f9d25954d15b0af3f69d8b27ff5c60ec10445b3c1317461ddb41ed851ce1a0eb"} Oct 01 06:28:21 crc kubenswrapper[4747]: E1001 06:28:21.975897 4747 log.go:32] "PullImage from image service failed" err="rpc error: code = Unknown desc = copying system image from manifest list: parsing image configuration: Get \"https://cdn01.quay.io/quayio-production-s3/sha256/84/8496687bb2e55d4e359bd30f70ed62c243efe3667d515895099951416ae5fe60?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIATAAF2YHTGR23ZTE6%2F20251001%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20251001T062820Z&X-Amz-Expires=600&X-Amz-SignedHeaders=host&X-Amz-Signature=822ad53c433f1e6171fb9ba7d61462fcd7c78665620af418063ff4ae11a36270®ion=us-east-1&namespace=redhat-prod&username=redhat-prod+registry_proxy&repo_name=openshift4----ose-kube-rbac-proxy-rhel9&akamai_signature=exp=1759301000~hmac=d6f40fc982ad8e2ba5d2421f1978153d85cad351ff5dbd98999963e1c2e42188\": remote error: tls: internal error" image="registry.redhat.io/openshift4/ose-kube-rbac-proxy-rhel9@sha256:467557f453bde3feaa0129f38c52fba00c3b98acddbf8978bf21ef13a9890fab" Oct 01 06:28:21 crc kubenswrapper[4747]: E1001 06:28:21.976090 4747 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:registry.redhat.io/openshift4/ose-kube-rbac-proxy-rhel9@sha256:467557f453bde3feaa0129f38c52fba00c3b98acddbf8978bf21ef13a9890fab,Command:[],Args:[--logtostderr --secure-listen-address=:9120 --upstream=http://localhost:29150/ --tls-cipher-suites=TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_RSA_WITH_AES_128_CBC_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 --tls-private-key-file=/etc/metrics/tls.key --tls-cert-file=/etc/metrics/tls.crt],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metricshttps,HostPort:9120,ContainerPort:9120,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{20971520 0} {} 20Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:metrics-certs,ReadOnly:true,MountPath:/etc/metrics,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nfhqv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod speaker-zw58l_metallb-system(25e93d69-ecec-4c53-81e5-18cd341f14f3): ErrImagePull: copying system image from manifest list: parsing image configuration: Get \"https://cdn01.quay.io/quayio-production-s3/sha256/84/8496687bb2e55d4e359bd30f70ed62c243efe3667d515895099951416ae5fe60?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIATAAF2YHTGR23ZTE6%2F20251001%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20251001T062820Z&X-Amz-Expires=600&X-Amz-SignedHeaders=host&X-Amz-Signature=822ad53c433f1e6171fb9ba7d61462fcd7c78665620af418063ff4ae11a36270&region=us-east-1&namespace=redhat-prod&username=redhat-prod+registry_proxy&repo_name=openshift4----ose-kube-rbac-proxy-rhel9&akamai_signature=exp=1759301000~hmac=d6f40fc982ad8e2ba5d2421f1978153d85cad351ff5dbd98999963e1c2e42188\": remote error: tls: internal error" logger="UnhandledError" Oct 01 06:28:21 crc kubenswrapper[4747]: E1001 06:28:21.977453 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"copying system image from manifest list: parsing image configuration: Get \\\"https://cdn01.quay.io/quayio-production-s3/sha256/84/8496687bb2e55d4e359bd30f70ed62c243efe3667d515895099951416ae5fe60?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIATAAF2YHTGR23ZTE6%2F20251001%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20251001T062820Z&X-Amz-Expires=600&X-Amz-SignedHeaders=host&X-Amz-Signature=822ad53c433f1e6171fb9ba7d61462fcd7c78665620af418063ff4ae11a36270&region=us-east-1&namespace=redhat-prod&username=redhat-prod+registry_proxy&repo_name=openshift4----ose-kube-rbac-proxy-rhel9&akamai_signature=exp=1759301000~hmac=d6f40fc982ad8e2ba5d2421f1978153d85cad351ff5dbd98999963e1c2e42188\\\": remote error: tls: internal error\"" pod="metallb-system/speaker-zw58l" podUID="25e93d69-ecec-4c53-81e5-18cd341f14f3" Oct 01 06:28:22 crc kubenswrapper[4747]: I1001 06:28:22.215820 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-zw58l" Oct 01 06:28:23 crc kubenswrapper[4747]: I1001 06:28:23.226202 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-zw58l" event={"ID":"25e93d69-ecec-4c53-81e5-18cd341f14f3","Type":"ContainerStarted","Data":"22ec242b92f63f3a21b4a57240b2a9905305665688ea7631f72d29e72bb49f68"} Oct 01 06:28:23 crc kubenswrapper[4747]: I1001 06:28:23.229134 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-5d688f5ffc-v6qhr" event={"ID":"ad53abf4-c21f-4dcc-9761-aed314fca36c","Type":"ContainerStarted","Data":"bf5de02740430cb85ac6d5aa4a120f5287c074d84a02d0c00de9f01497dda306"} Oct 01 06:28:23 crc kubenswrapper[4747]: I1001 06:28:23.229305 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/controller-5d688f5ffc-v6qhr" Oct 01 06:28:23 crc kubenswrapper[4747]: I1001 06:28:23.245229 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-zw58l" podStartSLOduration=-9223372031.609558 podStartE2EDuration="5.245216952s" podCreationTimestamp="2025-10-01 06:28:18 +0000 UTC" firstStartedPulling="2025-10-01 06:28:20.578083319 +0000 UTC m=+701.987740368" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:28:23.240372642 +0000 UTC m=+704.650029691" watchObservedRunningTime="2025-10-01 06:28:23.245216952 +0000 UTC m=+704.654874001" Oct 01 06:28:26 crc kubenswrapper[4747]: I1001 06:28:26.251576 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-mm45p" event={"ID":"0b60e795-ff1b-4dfc-b3cf-3b28b92ac293","Type":"ContainerStarted","Data":"07639c928a8ed419ca94db28585816428b8fd4fd8e238beae963b85ba5ecedbd"} Oct 01 06:28:26 crc kubenswrapper[4747]: I1001 06:28:26.252025 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-mm45p" Oct 01 06:28:26 crc kubenswrapper[4747]: I1001 06:28:26.254131 4747 generic.go:334] "Generic (PLEG): container finished" podID="9bda04f7-665b-4bd0-9884-999b80fcb561" containerID="3b8aeb7bd939c1bc3bb4c149c3516ad45f23f31132cc82e15aec59b32ff97830" exitCode=0 Oct 01 06:28:26 crc kubenswrapper[4747]: I1001 06:28:26.254207 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-9rgjv" event={"ID":"9bda04f7-665b-4bd0-9884-999b80fcb561","Type":"ContainerDied","Data":"3b8aeb7bd939c1bc3bb4c149c3516ad45f23f31132cc82e15aec59b32ff97830"} Oct 01 06:28:26 crc kubenswrapper[4747]: I1001 06:28:26.289220 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-mm45p" podStartSLOduration=2.081345622 podStartE2EDuration="8.289203414s" podCreationTimestamp="2025-10-01 06:28:18 +0000 UTC" firstStartedPulling="2025-10-01 06:28:19.537786121 +0000 UTC m=+700.947443190" lastFinishedPulling="2025-10-01 06:28:25.745643933 +0000 UTC m=+707.155300982" observedRunningTime="2025-10-01 06:28:26.288102206 +0000 UTC m=+707.697759285" watchObservedRunningTime="2025-10-01 06:28:26.289203414 +0000 UTC m=+707.698860473" Oct 01 06:28:26 crc kubenswrapper[4747]: I1001 06:28:26.291220 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-5d688f5ffc-v6qhr" podStartSLOduration=5.320975336 podStartE2EDuration="8.291211704s" podCreationTimestamp="2025-10-01 06:28:18 +0000 UTC" firstStartedPulling="2025-10-01 06:28:19.135700637 +0000 UTC m=+700.545357686" lastFinishedPulling="2025-10-01 06:28:22.105937005 +0000 UTC m=+703.515594054" observedRunningTime="2025-10-01 06:28:23.26244142 +0000 UTC m=+704.672098479" watchObservedRunningTime="2025-10-01 06:28:26.291211704 +0000 UTC m=+707.700868763" Oct 01 06:28:27 crc kubenswrapper[4747]: I1001 06:28:27.264737 4747 generic.go:334] "Generic (PLEG): container finished" podID="9bda04f7-665b-4bd0-9884-999b80fcb561" containerID="0f68005e132346c85bef7debc5169841ea81ad755539b8860ad82437cd81ac22" exitCode=0 Oct 01 06:28:27 crc kubenswrapper[4747]: I1001 06:28:27.264847 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-9rgjv" event={"ID":"9bda04f7-665b-4bd0-9884-999b80fcb561","Type":"ContainerDied","Data":"0f68005e132346c85bef7debc5169841ea81ad755539b8860ad82437cd81ac22"} Oct 01 
06:28:28 crc kubenswrapper[4747]: I1001 06:28:28.274443 4747 generic.go:334] "Generic (PLEG): container finished" podID="9bda04f7-665b-4bd0-9884-999b80fcb561" containerID="2613386afa38f98d758b697bbaecb9dd420abfd246de73f6d6581d054e630f8c" exitCode=0 Oct 01 06:28:28 crc kubenswrapper[4747]: I1001 06:28:28.274501 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-9rgjv" event={"ID":"9bda04f7-665b-4bd0-9884-999b80fcb561","Type":"ContainerDied","Data":"2613386afa38f98d758b697bbaecb9dd420abfd246de73f6d6581d054e630f8c"} Oct 01 06:28:29 crc kubenswrapper[4747]: I1001 06:28:29.287527 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-9rgjv" event={"ID":"9bda04f7-665b-4bd0-9884-999b80fcb561","Type":"ContainerStarted","Data":"7543c7c2180c9dd59210df7ebdcca6076389f3e8d757ad2ec2281aae53d8bf1e"} Oct 01 06:28:29 crc kubenswrapper[4747]: I1001 06:28:29.288060 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-9rgjv" event={"ID":"9bda04f7-665b-4bd0-9884-999b80fcb561","Type":"ContainerStarted","Data":"0c67dd667515799d1e87b83105110f5ba2e56a8b737698c1aa7c8442f4880032"} Oct 01 06:28:29 crc kubenswrapper[4747]: I1001 06:28:29.288075 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-9rgjv" event={"ID":"9bda04f7-665b-4bd0-9884-999b80fcb561","Type":"ContainerStarted","Data":"f5ecc47249b70ce74cfc7e1b7abba8bdc65dd3f169d7f611770500b80ca9c0b6"} Oct 01 06:28:29 crc kubenswrapper[4747]: I1001 06:28:29.288086 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-9rgjv" event={"ID":"9bda04f7-665b-4bd0-9884-999b80fcb561","Type":"ContainerStarted","Data":"3e509a82d95aae6072f5a2e39471b36de4ee12a4a4a5eb8f34349e1b25b3dc87"} Oct 01 06:28:29 crc kubenswrapper[4747]: I1001 06:28:29.288099 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-9rgjv" event={"ID":"9bda04f7-665b-4bd0-9884-999b80fcb561","Type":"ContainerStarted","Data":"a4717fce74e92c422f04f684ae68385260f706e9951c5df3e51b29183109acab"} Oct 01 06:28:30 crc kubenswrapper[4747]: I1001 06:28:30.264719 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-zw58l" Oct 01 06:28:30 crc kubenswrapper[4747]: I1001 06:28:30.302359 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-9rgjv" event={"ID":"9bda04f7-665b-4bd0-9884-999b80fcb561","Type":"ContainerStarted","Data":"810aefa57b9a8762c90b24425a8555d42e1462a7e9ff4d2c65fc354c99f7b82e"} Oct 01 06:28:30 crc kubenswrapper[4747]: I1001 06:28:30.303847 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:30 crc kubenswrapper[4747]: I1001 06:28:30.334375 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-9rgjv" podStartSLOduration=5.929057882 podStartE2EDuration="12.33434881s" podCreationTimestamp="2025-10-01 06:28:18 +0000 UTC" firstStartedPulling="2025-10-01 06:28:19.424880216 +0000 UTC m=+700.834537295" lastFinishedPulling="2025-10-01 06:28:25.830171174 +0000 UTC m=+707.239828223" observedRunningTime="2025-10-01 06:28:30.330785322 +0000 UTC m=+711.740442401" watchObservedRunningTime="2025-10-01 06:28:30.33434881 +0000 UTC m=+711.744005889" Oct 01 06:28:34 crc kubenswrapper[4747]: I1001 06:28:34.282549 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:34 crc kubenswrapper[4747]: I1001 
06:28:34.353016 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:36 crc kubenswrapper[4747]: I1001 06:28:36.247897 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-rh4l9"] Oct 01 06:28:36 crc kubenswrapper[4747]: I1001 06:28:36.248727 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-rh4l9" Oct 01 06:28:36 crc kubenswrapper[4747]: I1001 06:28:36.250699 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Oct 01 06:28:36 crc kubenswrapper[4747]: I1001 06:28:36.250927 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Oct 01 06:28:36 crc kubenswrapper[4747]: I1001 06:28:36.252171 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-index-dockercfg-wl69n" Oct 01 06:28:36 crc kubenswrapper[4747]: I1001 06:28:36.298688 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-rh4l9"] Oct 01 06:28:36 crc kubenswrapper[4747]: I1001 06:28:36.328151 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9qpk\" (UniqueName: \"kubernetes.io/projected/dabb982f-05de-4380-bd79-93ca018f5703-kube-api-access-c9qpk\") pod \"mariadb-operator-index-rh4l9\" (UID: \"dabb982f-05de-4380-bd79-93ca018f5703\") " pod="openstack-operators/mariadb-operator-index-rh4l9" Oct 01 06:28:36 crc kubenswrapper[4747]: I1001 06:28:36.429156 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9qpk\" (UniqueName: \"kubernetes.io/projected/dabb982f-05de-4380-bd79-93ca018f5703-kube-api-access-c9qpk\") pod \"mariadb-operator-index-rh4l9\" (UID: \"dabb982f-05de-4380-bd79-93ca018f5703\") " pod="openstack-operators/mariadb-operator-index-rh4l9" Oct 01 06:28:36 crc kubenswrapper[4747]: I1001 06:28:36.447517 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9qpk\" (UniqueName: \"kubernetes.io/projected/dabb982f-05de-4380-bd79-93ca018f5703-kube-api-access-c9qpk\") pod \"mariadb-operator-index-rh4l9\" (UID: \"dabb982f-05de-4380-bd79-93ca018f5703\") " pod="openstack-operators/mariadb-operator-index-rh4l9" Oct 01 06:28:36 crc kubenswrapper[4747]: I1001 06:28:36.576492 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-rh4l9" Oct 01 06:28:36 crc kubenswrapper[4747]: I1001 06:28:36.840352 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-rh4l9"] Oct 01 06:28:37 crc kubenswrapper[4747]: I1001 06:28:37.351468 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-rh4l9" event={"ID":"dabb982f-05de-4380-bd79-93ca018f5703","Type":"ContainerStarted","Data":"e0cb8f68c1f357a1235816606394b66610691c5ba573cec3d2597958ed0160e1"} Oct 01 06:28:38 crc kubenswrapper[4747]: I1001 06:28:38.359625 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-rh4l9" event={"ID":"dabb982f-05de-4380-bd79-93ca018f5703","Type":"ContainerStarted","Data":"a0a25a13a2f2cfeb756bf43451e2b049c5e023788711dd4863b030c44bc1ee8d"} Oct 01 06:28:38 crc kubenswrapper[4747]: I1001 06:28:38.384896 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-rh4l9" podStartSLOduration=1.580854054 podStartE2EDuration="2.384843354s" podCreationTimestamp="2025-10-01 06:28:36 +0000 UTC" firstStartedPulling="2025-10-01 06:28:36.849550005 +0000 UTC m=+718.259207054" lastFinishedPulling="2025-10-01 06:28:37.653539305 +0000 UTC m=+719.063196354" observedRunningTime="2025-10-01 06:28:38.383191322 +0000 UTC m=+719.792848371" watchObservedRunningTime="2025-10-01 06:28:38.384843354 +0000 UTC m=+719.794500453" Oct 01 06:28:38 crc kubenswrapper[4747]: I1001 06:28:38.783532 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-5d688f5ffc-v6qhr" Oct 01 06:28:39 crc kubenswrapper[4747]: I1001 06:28:39.289893 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-5478bdb765-mm45p" Oct 01 06:28:39 crc kubenswrapper[4747]: I1001 06:28:39.290286 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-9rgjv" Oct 01 06:28:39 crc kubenswrapper[4747]: I1001 06:28:39.610857 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-rh4l9"] Oct 01 06:28:40 crc kubenswrapper[4747]: I1001 06:28:40.238216 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-qhm8b"] Oct 01 06:28:40 crc kubenswrapper[4747]: I1001 06:28:40.240452 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-qhm8b" Oct 01 06:28:40 crc kubenswrapper[4747]: I1001 06:28:40.257509 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-qhm8b"] Oct 01 06:28:40 crc kubenswrapper[4747]: I1001 06:28:40.280811 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5f84\" (UniqueName: \"kubernetes.io/projected/49ee0c08-0ca2-4d99-9a74-bb059025f4bc-kube-api-access-v5f84\") pod \"mariadb-operator-index-qhm8b\" (UID: \"49ee0c08-0ca2-4d99-9a74-bb059025f4bc\") " pod="openstack-operators/mariadb-operator-index-qhm8b" Oct 01 06:28:40 crc kubenswrapper[4747]: I1001 06:28:40.371919 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-index-rh4l9" podUID="dabb982f-05de-4380-bd79-93ca018f5703" containerName="registry-server" containerID="cri-o://a0a25a13a2f2cfeb756bf43451e2b049c5e023788711dd4863b030c44bc1ee8d" gracePeriod=2 Oct 01 06:28:40 crc kubenswrapper[4747]: I1001 06:28:40.382879 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5f84\" (UniqueName: \"kubernetes.io/projected/49ee0c08-0ca2-4d99-9a74-bb059025f4bc-kube-api-access-v5f84\") pod \"mariadb-operator-index-qhm8b\" (UID: \"49ee0c08-0ca2-4d99-9a74-bb059025f4bc\") " pod="openstack-operators/mariadb-operator-index-qhm8b" Oct 01 06:28:40 crc kubenswrapper[4747]: I1001 06:28:40.425938 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5f84\" (UniqueName: \"kubernetes.io/projected/49ee0c08-0ca2-4d99-9a74-bb059025f4bc-kube-api-access-v5f84\") pod \"mariadb-operator-index-qhm8b\" (UID: \"49ee0c08-0ca2-4d99-9a74-bb059025f4bc\") " pod="openstack-operators/mariadb-operator-index-qhm8b" Oct 01 06:28:40 crc kubenswrapper[4747]: I1001 06:28:40.574181 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-qhm8b" Oct 01 06:28:40 crc kubenswrapper[4747]: I1001 06:28:40.790926 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-qhm8b"] Oct 01 06:28:40 crc kubenswrapper[4747]: W1001 06:28:40.813154 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod49ee0c08_0ca2_4d99_9a74_bb059025f4bc.slice/crio-5f94cf3d0b62754ff4049e78f7521f842bffb7f8ec4af4ecfe75483ec390a795 WatchSource:0}: Error finding container 5f94cf3d0b62754ff4049e78f7521f842bffb7f8ec4af4ecfe75483ec390a795: Status 404 returned error can't find the container with id 5f94cf3d0b62754ff4049e78f7521f842bffb7f8ec4af4ecfe75483ec390a795 Oct 01 06:28:41 crc kubenswrapper[4747]: I1001 06:28:41.381701 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-qhm8b" event={"ID":"49ee0c08-0ca2-4d99-9a74-bb059025f4bc","Type":"ContainerStarted","Data":"5f94cf3d0b62754ff4049e78f7521f842bffb7f8ec4af4ecfe75483ec390a795"} Oct 01 06:28:41 crc kubenswrapper[4747]: I1001 06:28:41.384626 4747 generic.go:334] "Generic (PLEG): container finished" podID="dabb982f-05de-4380-bd79-93ca018f5703" containerID="a0a25a13a2f2cfeb756bf43451e2b049c5e023788711dd4863b030c44bc1ee8d" exitCode=0 Oct 01 06:28:41 crc kubenswrapper[4747]: I1001 06:28:41.384684 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-rh4l9" event={"ID":"dabb982f-05de-4380-bd79-93ca018f5703","Type":"ContainerDied","Data":"a0a25a13a2f2cfeb756bf43451e2b049c5e023788711dd4863b030c44bc1ee8d"} Oct 01 06:28:41 crc kubenswrapper[4747]: I1001 06:28:41.894480 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-rh4l9" Oct 01 06:28:42 crc kubenswrapper[4747]: I1001 06:28:42.005820 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9qpk\" (UniqueName: \"kubernetes.io/projected/dabb982f-05de-4380-bd79-93ca018f5703-kube-api-access-c9qpk\") pod \"dabb982f-05de-4380-bd79-93ca018f5703\" (UID: \"dabb982f-05de-4380-bd79-93ca018f5703\") " Oct 01 06:28:42 crc kubenswrapper[4747]: I1001 06:28:42.012736 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dabb982f-05de-4380-bd79-93ca018f5703-kube-api-access-c9qpk" (OuterVolumeSpecName: "kube-api-access-c9qpk") pod "dabb982f-05de-4380-bd79-93ca018f5703" (UID: "dabb982f-05de-4380-bd79-93ca018f5703"). InnerVolumeSpecName "kube-api-access-c9qpk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:28:42 crc kubenswrapper[4747]: I1001 06:28:42.107301 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9qpk\" (UniqueName: \"kubernetes.io/projected/dabb982f-05de-4380-bd79-93ca018f5703-kube-api-access-c9qpk\") on node \"crc\" DevicePath \"\"" Oct 01 06:28:42 crc kubenswrapper[4747]: I1001 06:28:42.394370 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-qhm8b" event={"ID":"49ee0c08-0ca2-4d99-9a74-bb059025f4bc","Type":"ContainerStarted","Data":"5700c11e0df51b2a6a2420a2e56056e715a894f14eb6b179091b9fd46f92a413"} Oct 01 06:28:42 crc kubenswrapper[4747]: I1001 06:28:42.396682 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-rh4l9" event={"ID":"dabb982f-05de-4380-bd79-93ca018f5703","Type":"ContainerDied","Data":"e0cb8f68c1f357a1235816606394b66610691c5ba573cec3d2597958ed0160e1"} Oct 01 06:28:42 crc kubenswrapper[4747]: I1001 06:28:42.396781 4747 scope.go:117] "RemoveContainer" containerID="a0a25a13a2f2cfeb756bf43451e2b049c5e023788711dd4863b030c44bc1ee8d" Oct 01 06:28:42 crc kubenswrapper[4747]: I1001 06:28:42.396798 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-rh4l9" Oct 01 06:28:42 crc kubenswrapper[4747]: I1001 06:28:42.419580 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-qhm8b" podStartSLOduration=1.9234216339999999 podStartE2EDuration="2.419552437s" podCreationTimestamp="2025-10-01 06:28:40 +0000 UTC" firstStartedPulling="2025-10-01 06:28:40.817207309 +0000 UTC m=+722.226864358" lastFinishedPulling="2025-10-01 06:28:41.313338092 +0000 UTC m=+722.722995161" observedRunningTime="2025-10-01 06:28:42.410064864 +0000 UTC m=+723.819721953" watchObservedRunningTime="2025-10-01 06:28:42.419552437 +0000 UTC m=+723.829209526" Oct 01 06:28:42 crc kubenswrapper[4747]: I1001 06:28:42.458258 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-rh4l9"] Oct 01 06:28:42 crc kubenswrapper[4747]: I1001 06:28:42.464959 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-rh4l9"] Oct 01 06:28:43 crc kubenswrapper[4747]: I1001 06:28:43.283899 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dabb982f-05de-4380-bd79-93ca018f5703" path="/var/lib/kubelet/pods/dabb982f-05de-4380-bd79-93ca018f5703/volumes" Oct 01 06:28:50 crc kubenswrapper[4747]: I1001 06:28:50.574822 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-index-qhm8b" Oct 01 06:28:50 crc kubenswrapper[4747]: I1001 06:28:50.575474 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/mariadb-operator-index-qhm8b" Oct 01 06:28:50 crc kubenswrapper[4747]: I1001 06:28:50.612688 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/mariadb-operator-index-qhm8b" Oct 01 06:28:51 crc kubenswrapper[4747]: I1001 06:28:51.506672 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-index-qhm8b" Oct 01 06:28:52 crc kubenswrapper[4747]: I1001 06:28:52.457722 4747 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl"] Oct 01 06:28:52 crc kubenswrapper[4747]: E1001 06:28:52.458341 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dabb982f-05de-4380-bd79-93ca018f5703" containerName="registry-server" Oct 01 06:28:52 crc kubenswrapper[4747]: I1001 06:28:52.458361 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="dabb982f-05de-4380-bd79-93ca018f5703" containerName="registry-server" Oct 01 06:28:52 crc kubenswrapper[4747]: I1001 06:28:52.458561 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="dabb982f-05de-4380-bd79-93ca018f5703" containerName="registry-server" Oct 01 06:28:52 crc kubenswrapper[4747]: I1001 06:28:52.459785 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl" Oct 01 06:28:52 crc kubenswrapper[4747]: I1001 06:28:52.466552 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-b9vtl" Oct 01 06:28:52 crc kubenswrapper[4747]: I1001 06:28:52.475372 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl"] Oct 01 06:28:52 crc kubenswrapper[4747]: I1001 06:28:52.556277 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2cd118b3-1b7f-497d-913e-2938d710bce4-util\") pod \"27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl\" (UID: \"2cd118b3-1b7f-497d-913e-2938d710bce4\") " pod="openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl" Oct 01 06:28:52 crc kubenswrapper[4747]: I1001 06:28:52.556365 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2cd118b3-1b7f-497d-913e-2938d710bce4-bundle\") pod \"27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl\" (UID: \"2cd118b3-1b7f-497d-913e-2938d710bce4\") " pod="openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl" Oct 01 06:28:52 crc kubenswrapper[4747]: I1001 06:28:52.556542 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxc9k\" (UniqueName: \"kubernetes.io/projected/2cd118b3-1b7f-497d-913e-2938d710bce4-kube-api-access-cxc9k\") pod \"27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl\" (UID: \"2cd118b3-1b7f-497d-913e-2938d710bce4\") " pod="openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl" Oct 01 06:28:52 crc kubenswrapper[4747]: I1001 06:28:52.658037 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxc9k\" (UniqueName: \"kubernetes.io/projected/2cd118b3-1b7f-497d-913e-2938d710bce4-kube-api-access-cxc9k\") pod \"27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl\" (UID: \"2cd118b3-1b7f-497d-913e-2938d710bce4\") " pod="openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl" Oct 01 06:28:52 crc kubenswrapper[4747]: I1001 06:28:52.658191 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2cd118b3-1b7f-497d-913e-2938d710bce4-util\") pod \"27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl\" (UID: 
\"2cd118b3-1b7f-497d-913e-2938d710bce4\") " pod="openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl" Oct 01 06:28:52 crc kubenswrapper[4747]: I1001 06:28:52.658968 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2cd118b3-1b7f-497d-913e-2938d710bce4-bundle\") pod \"27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl\" (UID: \"2cd118b3-1b7f-497d-913e-2938d710bce4\") " pod="openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl" Oct 01 06:28:52 crc kubenswrapper[4747]: I1001 06:28:52.659056 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2cd118b3-1b7f-497d-913e-2938d710bce4-util\") pod \"27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl\" (UID: \"2cd118b3-1b7f-497d-913e-2938d710bce4\") " pod="openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl" Oct 01 06:28:52 crc kubenswrapper[4747]: I1001 06:28:52.659337 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2cd118b3-1b7f-497d-913e-2938d710bce4-bundle\") pod \"27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl\" (UID: \"2cd118b3-1b7f-497d-913e-2938d710bce4\") " pod="openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl" Oct 01 06:28:52 crc kubenswrapper[4747]: I1001 06:28:52.697055 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxc9k\" (UniqueName: \"kubernetes.io/projected/2cd118b3-1b7f-497d-913e-2938d710bce4-kube-api-access-cxc9k\") pod \"27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl\" (UID: \"2cd118b3-1b7f-497d-913e-2938d710bce4\") " pod="openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl" Oct 01 06:28:52 crc kubenswrapper[4747]: I1001 06:28:52.786947 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl" Oct 01 06:28:53 crc kubenswrapper[4747]: I1001 06:28:53.026684 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl"] Oct 01 06:28:53 crc kubenswrapper[4747]: I1001 06:28:53.484116 4747 generic.go:334] "Generic (PLEG): container finished" podID="2cd118b3-1b7f-497d-913e-2938d710bce4" containerID="a38d7045779dbf00229d7cac2b08d6f90ccce999007fcaef756dda6fc0820d44" exitCode=0 Oct 01 06:28:53 crc kubenswrapper[4747]: I1001 06:28:53.485116 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl" event={"ID":"2cd118b3-1b7f-497d-913e-2938d710bce4","Type":"ContainerDied","Data":"a38d7045779dbf00229d7cac2b08d6f90ccce999007fcaef756dda6fc0820d44"} Oct 01 06:28:53 crc kubenswrapper[4747]: I1001 06:28:53.485244 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl" event={"ID":"2cd118b3-1b7f-497d-913e-2938d710bce4","Type":"ContainerStarted","Data":"35c3a96b7092d368d79296a22504ceca312f748492f57aa23aaca54dfdbe6555"} Oct 01 06:28:54 crc kubenswrapper[4747]: I1001 06:28:54.491152 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl" event={"ID":"2cd118b3-1b7f-497d-913e-2938d710bce4","Type":"ContainerStarted","Data":"0dbdd729f3ab07ba3756c258d8838bfe357a76b4e08757f2063cab5709f9ae4e"} Oct 01 06:28:55 crc kubenswrapper[4747]: I1001 06:28:55.505701 4747 generic.go:334] "Generic (PLEG): container finished" podID="2cd118b3-1b7f-497d-913e-2938d710bce4" containerID="0dbdd729f3ab07ba3756c258d8838bfe357a76b4e08757f2063cab5709f9ae4e" exitCode=0 Oct 01 06:28:55 crc kubenswrapper[4747]: I1001 06:28:55.506406 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl" event={"ID":"2cd118b3-1b7f-497d-913e-2938d710bce4","Type":"ContainerDied","Data":"0dbdd729f3ab07ba3756c258d8838bfe357a76b4e08757f2063cab5709f9ae4e"} Oct 01 06:28:56 crc kubenswrapper[4747]: I1001 06:28:56.516413 4747 generic.go:334] "Generic (PLEG): container finished" podID="2cd118b3-1b7f-497d-913e-2938d710bce4" containerID="6c43850481d5ebbd4b9a9b6d5201fd65786a4d39fbe6d407f1b6dcd2bec706b0" exitCode=0 Oct 01 06:28:56 crc kubenswrapper[4747]: I1001 06:28:56.516696 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl" event={"ID":"2cd118b3-1b7f-497d-913e-2938d710bce4","Type":"ContainerDied","Data":"6c43850481d5ebbd4b9a9b6d5201fd65786a4d39fbe6d407f1b6dcd2bec706b0"} Oct 01 06:28:57 crc kubenswrapper[4747]: I1001 06:28:57.856163 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl" Oct 01 06:28:58 crc kubenswrapper[4747]: I1001 06:28:58.031638 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2cd118b3-1b7f-497d-913e-2938d710bce4-util\") pod \"2cd118b3-1b7f-497d-913e-2938d710bce4\" (UID: \"2cd118b3-1b7f-497d-913e-2938d710bce4\") " Oct 01 06:28:58 crc kubenswrapper[4747]: I1001 06:28:58.031814 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cxc9k\" (UniqueName: \"kubernetes.io/projected/2cd118b3-1b7f-497d-913e-2938d710bce4-kube-api-access-cxc9k\") pod \"2cd118b3-1b7f-497d-913e-2938d710bce4\" (UID: \"2cd118b3-1b7f-497d-913e-2938d710bce4\") " Oct 01 06:28:58 crc kubenswrapper[4747]: I1001 06:28:58.031898 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2cd118b3-1b7f-497d-913e-2938d710bce4-bundle\") pod \"2cd118b3-1b7f-497d-913e-2938d710bce4\" (UID: \"2cd118b3-1b7f-497d-913e-2938d710bce4\") " Oct 01 06:28:58 crc kubenswrapper[4747]: I1001 06:28:58.033134 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2cd118b3-1b7f-497d-913e-2938d710bce4-bundle" (OuterVolumeSpecName: "bundle") pod "2cd118b3-1b7f-497d-913e-2938d710bce4" (UID: "2cd118b3-1b7f-497d-913e-2938d710bce4"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:28:58 crc kubenswrapper[4747]: I1001 06:28:58.038089 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cd118b3-1b7f-497d-913e-2938d710bce4-kube-api-access-cxc9k" (OuterVolumeSpecName: "kube-api-access-cxc9k") pod "2cd118b3-1b7f-497d-913e-2938d710bce4" (UID: "2cd118b3-1b7f-497d-913e-2938d710bce4"). InnerVolumeSpecName "kube-api-access-cxc9k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:28:58 crc kubenswrapper[4747]: I1001 06:28:58.049852 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2cd118b3-1b7f-497d-913e-2938d710bce4-util" (OuterVolumeSpecName: "util") pod "2cd118b3-1b7f-497d-913e-2938d710bce4" (UID: "2cd118b3-1b7f-497d-913e-2938d710bce4"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:28:58 crc kubenswrapper[4747]: I1001 06:28:58.133720 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cxc9k\" (UniqueName: \"kubernetes.io/projected/2cd118b3-1b7f-497d-913e-2938d710bce4-kube-api-access-cxc9k\") on node \"crc\" DevicePath \"\"" Oct 01 06:28:58 crc kubenswrapper[4747]: I1001 06:28:58.133784 4747 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2cd118b3-1b7f-497d-913e-2938d710bce4-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:28:58 crc kubenswrapper[4747]: I1001 06:28:58.133797 4747 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2cd118b3-1b7f-497d-913e-2938d710bce4-util\") on node \"crc\" DevicePath \"\"" Oct 01 06:28:58 crc kubenswrapper[4747]: I1001 06:28:58.536849 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl" event={"ID":"2cd118b3-1b7f-497d-913e-2938d710bce4","Type":"ContainerDied","Data":"35c3a96b7092d368d79296a22504ceca312f748492f57aa23aaca54dfdbe6555"} Oct 01 06:28:58 crc kubenswrapper[4747]: I1001 06:28:58.536906 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="35c3a96b7092d368d79296a22504ceca312f748492f57aa23aaca54dfdbe6555" Oct 01 06:28:58 crc kubenswrapper[4747]: I1001 06:28:58.536926 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl" Oct 01 06:29:06 crc kubenswrapper[4747]: I1001 06:29:06.481944 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-74c5fcf84b-mgdfk"] Oct 01 06:29:06 crc kubenswrapper[4747]: E1001 06:29:06.482820 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cd118b3-1b7f-497d-913e-2938d710bce4" containerName="pull" Oct 01 06:29:06 crc kubenswrapper[4747]: I1001 06:29:06.482835 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cd118b3-1b7f-497d-913e-2938d710bce4" containerName="pull" Oct 01 06:29:06 crc kubenswrapper[4747]: E1001 06:29:06.482863 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cd118b3-1b7f-497d-913e-2938d710bce4" containerName="extract" Oct 01 06:29:06 crc kubenswrapper[4747]: I1001 06:29:06.482871 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cd118b3-1b7f-497d-913e-2938d710bce4" containerName="extract" Oct 01 06:29:06 crc kubenswrapper[4747]: E1001 06:29:06.482880 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cd118b3-1b7f-497d-913e-2938d710bce4" containerName="util" Oct 01 06:29:06 crc kubenswrapper[4747]: I1001 06:29:06.482887 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cd118b3-1b7f-497d-913e-2938d710bce4" containerName="util" Oct 01 06:29:06 crc kubenswrapper[4747]: I1001 06:29:06.483009 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cd118b3-1b7f-497d-913e-2938d710bce4" containerName="extract" Oct 01 06:29:06 crc kubenswrapper[4747]: I1001 06:29:06.483732 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-74c5fcf84b-mgdfk" Oct 01 06:29:06 crc kubenswrapper[4747]: I1001 06:29:06.485418 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-4f9f8" Oct 01 06:29:06 crc kubenswrapper[4747]: I1001 06:29:06.485898 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Oct 01 06:29:06 crc kubenswrapper[4747]: I1001 06:29:06.486287 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-service-cert" Oct 01 06:29:06 crc kubenswrapper[4747]: I1001 06:29:06.497232 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-74c5fcf84b-mgdfk"] Oct 01 06:29:06 crc kubenswrapper[4747]: I1001 06:29:06.674944 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nf24r\" (UniqueName: \"kubernetes.io/projected/3a7cc65a-645c-4533-b334-5f003e1d8382-kube-api-access-nf24r\") pod \"mariadb-operator-controller-manager-74c5fcf84b-mgdfk\" (UID: \"3a7cc65a-645c-4533-b334-5f003e1d8382\") " pod="openstack-operators/mariadb-operator-controller-manager-74c5fcf84b-mgdfk" Oct 01 06:29:06 crc kubenswrapper[4747]: I1001 06:29:06.674999 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3a7cc65a-645c-4533-b334-5f003e1d8382-webhook-cert\") pod \"mariadb-operator-controller-manager-74c5fcf84b-mgdfk\" (UID: \"3a7cc65a-645c-4533-b334-5f003e1d8382\") " pod="openstack-operators/mariadb-operator-controller-manager-74c5fcf84b-mgdfk" Oct 01 06:29:06 crc kubenswrapper[4747]: I1001 06:29:06.675125 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3a7cc65a-645c-4533-b334-5f003e1d8382-apiservice-cert\") pod \"mariadb-operator-controller-manager-74c5fcf84b-mgdfk\" (UID: \"3a7cc65a-645c-4533-b334-5f003e1d8382\") " pod="openstack-operators/mariadb-operator-controller-manager-74c5fcf84b-mgdfk" Oct 01 06:29:06 crc kubenswrapper[4747]: I1001 06:29:06.776322 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3a7cc65a-645c-4533-b334-5f003e1d8382-apiservice-cert\") pod \"mariadb-operator-controller-manager-74c5fcf84b-mgdfk\" (UID: \"3a7cc65a-645c-4533-b334-5f003e1d8382\") " pod="openstack-operators/mariadb-operator-controller-manager-74c5fcf84b-mgdfk" Oct 01 06:29:06 crc kubenswrapper[4747]: I1001 06:29:06.776446 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nf24r\" (UniqueName: \"kubernetes.io/projected/3a7cc65a-645c-4533-b334-5f003e1d8382-kube-api-access-nf24r\") pod \"mariadb-operator-controller-manager-74c5fcf84b-mgdfk\" (UID: \"3a7cc65a-645c-4533-b334-5f003e1d8382\") " pod="openstack-operators/mariadb-operator-controller-manager-74c5fcf84b-mgdfk" Oct 01 06:29:06 crc kubenswrapper[4747]: I1001 06:29:06.776479 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3a7cc65a-645c-4533-b334-5f003e1d8382-webhook-cert\") pod \"mariadb-operator-controller-manager-74c5fcf84b-mgdfk\" (UID: \"3a7cc65a-645c-4533-b334-5f003e1d8382\") 
" pod="openstack-operators/mariadb-operator-controller-manager-74c5fcf84b-mgdfk" Oct 01 06:29:06 crc kubenswrapper[4747]: I1001 06:29:06.783049 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3a7cc65a-645c-4533-b334-5f003e1d8382-webhook-cert\") pod \"mariadb-operator-controller-manager-74c5fcf84b-mgdfk\" (UID: \"3a7cc65a-645c-4533-b334-5f003e1d8382\") " pod="openstack-operators/mariadb-operator-controller-manager-74c5fcf84b-mgdfk" Oct 01 06:29:06 crc kubenswrapper[4747]: I1001 06:29:06.784588 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3a7cc65a-645c-4533-b334-5f003e1d8382-apiservice-cert\") pod \"mariadb-operator-controller-manager-74c5fcf84b-mgdfk\" (UID: \"3a7cc65a-645c-4533-b334-5f003e1d8382\") " pod="openstack-operators/mariadb-operator-controller-manager-74c5fcf84b-mgdfk" Oct 01 06:29:06 crc kubenswrapper[4747]: I1001 06:29:06.817033 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nf24r\" (UniqueName: \"kubernetes.io/projected/3a7cc65a-645c-4533-b334-5f003e1d8382-kube-api-access-nf24r\") pod \"mariadb-operator-controller-manager-74c5fcf84b-mgdfk\" (UID: \"3a7cc65a-645c-4533-b334-5f003e1d8382\") " pod="openstack-operators/mariadb-operator-controller-manager-74c5fcf84b-mgdfk" Oct 01 06:29:07 crc kubenswrapper[4747]: I1001 06:29:07.102131 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-74c5fcf84b-mgdfk" Oct 01 06:29:07 crc kubenswrapper[4747]: I1001 06:29:07.402453 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-74c5fcf84b-mgdfk"] Oct 01 06:29:07 crc kubenswrapper[4747]: I1001 06:29:07.591689 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-74c5fcf84b-mgdfk" event={"ID":"3a7cc65a-645c-4533-b334-5f003e1d8382","Type":"ContainerStarted","Data":"3d4155d632ce3edd350cccd6a28aac62e731f9f6c23d0fc3f62ea3fe9aeb80a0"} Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.204736 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8jxsb"] Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.205466 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" podUID="e9b0ef66-c372-4715-b069-4cdbcaf66f1f" containerName="controller-manager" containerID="cri-o://2f62b4ed3df63e1625901b6a7a88c715b1203cfdbdd4ff28d62e3a733b696186" gracePeriod=30 Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.279993 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s"] Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.280195 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" podUID="4f5ead6c-0aea-400a-a02d-4cd1fdded9c5" containerName="route-controller-manager" containerID="cri-o://7e18c323293e3d00441759d43378d436345e02717647aad43e6c62de9a7af647" gracePeriod=30 Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.588729 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.612968 4747 generic.go:334] "Generic (PLEG): container finished" podID="e9b0ef66-c372-4715-b069-4cdbcaf66f1f" containerID="2f62b4ed3df63e1625901b6a7a88c715b1203cfdbdd4ff28d62e3a733b696186" exitCode=0 Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.613292 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" event={"ID":"e9b0ef66-c372-4715-b069-4cdbcaf66f1f","Type":"ContainerDied","Data":"2f62b4ed3df63e1625901b6a7a88c715b1203cfdbdd4ff28d62e3a733b696186"} Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.613322 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" event={"ID":"e9b0ef66-c372-4715-b069-4cdbcaf66f1f","Type":"ContainerDied","Data":"2aa41ee4a51de0b6fdab53b456a5885aff859576e1f0abd75b6ad1c648dd5e83"} Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.613337 4747 scope.go:117] "RemoveContainer" containerID="2f62b4ed3df63e1625901b6a7a88c715b1203cfdbdd4ff28d62e3a733b696186" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.613496 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-8jxsb" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.618261 4747 generic.go:334] "Generic (PLEG): container finished" podID="4f5ead6c-0aea-400a-a02d-4cd1fdded9c5" containerID="7e18c323293e3d00441759d43378d436345e02717647aad43e6c62de9a7af647" exitCode=0 Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.618319 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" event={"ID":"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5","Type":"ContainerDied","Data":"7e18c323293e3d00441759d43378d436345e02717647aad43e6c62de9a7af647"} Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.633117 4747 scope.go:117] "RemoveContainer" containerID="2f62b4ed3df63e1625901b6a7a88c715b1203cfdbdd4ff28d62e3a733b696186" Oct 01 06:29:08 crc kubenswrapper[4747]: E1001 06:29:08.633476 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f62b4ed3df63e1625901b6a7a88c715b1203cfdbdd4ff28d62e3a733b696186\": container with ID starting with 2f62b4ed3df63e1625901b6a7a88c715b1203cfdbdd4ff28d62e3a733b696186 not found: ID does not exist" containerID="2f62b4ed3df63e1625901b6a7a88c715b1203cfdbdd4ff28d62e3a733b696186" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.633504 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f62b4ed3df63e1625901b6a7a88c715b1203cfdbdd4ff28d62e3a733b696186"} err="failed to get container status \"2f62b4ed3df63e1625901b6a7a88c715b1203cfdbdd4ff28d62e3a733b696186\": rpc error: code = NotFound desc = could not find container \"2f62b4ed3df63e1625901b6a7a88c715b1203cfdbdd4ff28d62e3a733b696186\": container with ID starting with 2f62b4ed3df63e1625901b6a7a88c715b1203cfdbdd4ff28d62e3a733b696186 not found: ID does not exist" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.635730 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.701645 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-config\") pod \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\" (UID: \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\") " Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.701691 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-proxy-ca-bundles\") pod \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\" (UID: \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\") " Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.701724 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9b9nt\" (UniqueName: \"kubernetes.io/projected/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-kube-api-access-9b9nt\") pod \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\" (UID: \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\") " Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.701778 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-client-ca\") pod \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\" (UID: \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\") " Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.701901 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-serving-cert\") pod \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\" (UID: \"e9b0ef66-c372-4715-b069-4cdbcaf66f1f\") " Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.702897 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "e9b0ef66-c372-4715-b069-4cdbcaf66f1f" (UID: "e9b0ef66-c372-4715-b069-4cdbcaf66f1f"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.702979 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-config" (OuterVolumeSpecName: "config") pod "e9b0ef66-c372-4715-b069-4cdbcaf66f1f" (UID: "e9b0ef66-c372-4715-b069-4cdbcaf66f1f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.703449 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-client-ca" (OuterVolumeSpecName: "client-ca") pod "e9b0ef66-c372-4715-b069-4cdbcaf66f1f" (UID: "e9b0ef66-c372-4715-b069-4cdbcaf66f1f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.707311 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-kube-api-access-9b9nt" (OuterVolumeSpecName: "kube-api-access-9b9nt") pod "e9b0ef66-c372-4715-b069-4cdbcaf66f1f" (UID: "e9b0ef66-c372-4715-b069-4cdbcaf66f1f"). 
InnerVolumeSpecName "kube-api-access-9b9nt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.707697 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e9b0ef66-c372-4715-b069-4cdbcaf66f1f" (UID: "e9b0ef66-c372-4715-b069-4cdbcaf66f1f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.803239 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hw4d\" (UniqueName: \"kubernetes.io/projected/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-kube-api-access-7hw4d\") pod \"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5\" (UID: \"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5\") " Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.803341 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-client-ca\") pod \"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5\" (UID: \"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5\") " Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.803393 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-config\") pod \"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5\" (UID: \"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5\") " Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.803423 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-serving-cert\") pod \"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5\" (UID: \"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5\") " Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.803660 4747 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-client-ca\") on node \"crc\" DevicePath \"\"" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.803683 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.803692 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.803702 4747 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.803711 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9b9nt\" (UniqueName: \"kubernetes.io/projected/e9b0ef66-c372-4715-b069-4cdbcaf66f1f-kube-api-access-9b9nt\") on node \"crc\" DevicePath \"\"" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.804355 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-config" (OuterVolumeSpecName: "config") pod "4f5ead6c-0aea-400a-a02d-4cd1fdded9c5" (UID: 
"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.804449 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-client-ca" (OuterVolumeSpecName: "client-ca") pod "4f5ead6c-0aea-400a-a02d-4cd1fdded9c5" (UID: "4f5ead6c-0aea-400a-a02d-4cd1fdded9c5"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.808554 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "4f5ead6c-0aea-400a-a02d-4cd1fdded9c5" (UID: "4f5ead6c-0aea-400a-a02d-4cd1fdded9c5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.809383 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-kube-api-access-7hw4d" (OuterVolumeSpecName: "kube-api-access-7hw4d") pod "4f5ead6c-0aea-400a-a02d-4cd1fdded9c5" (UID: "4f5ead6c-0aea-400a-a02d-4cd1fdded9c5"). InnerVolumeSpecName "kube-api-access-7hw4d". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.905346 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hw4d\" (UniqueName: \"kubernetes.io/projected/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-kube-api-access-7hw4d\") on node \"crc\" DevicePath \"\"" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.905388 4747 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-client-ca\") on node \"crc\" DevicePath \"\"" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.905397 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.905405 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.944258 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8jxsb"] Oct 01 06:29:08 crc kubenswrapper[4747]: I1001 06:29:08.948248 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-8jxsb"] Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.290431 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9b0ef66-c372-4715-b069-4cdbcaf66f1f" path="/var/lib/kubelet/pods/e9b0ef66-c372-4715-b069-4cdbcaf66f1f/volumes" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.301260 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx"] Oct 01 06:29:09 crc kubenswrapper[4747]: E1001 06:29:09.301611 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9b0ef66-c372-4715-b069-4cdbcaf66f1f" containerName="controller-manager" Oct 01 06:29:09 crc 
kubenswrapper[4747]: I1001 06:29:09.301631 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9b0ef66-c372-4715-b069-4cdbcaf66f1f" containerName="controller-manager" Oct 01 06:29:09 crc kubenswrapper[4747]: E1001 06:29:09.301655 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f5ead6c-0aea-400a-a02d-4cd1fdded9c5" containerName="route-controller-manager" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.301667 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f5ead6c-0aea-400a-a02d-4cd1fdded9c5" containerName="route-controller-manager" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.301880 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9b0ef66-c372-4715-b069-4cdbcaf66f1f" containerName="controller-manager" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.301914 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f5ead6c-0aea-400a-a02d-4cd1fdded9c5" containerName="route-controller-manager" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.302492 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.308650 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.308959 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.309671 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.310108 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.310217 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.310322 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.311418 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx"] Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.318623 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.410370 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1cac432-d00e-45c2-b305-ba1c781829d9-serving-cert\") pod \"controller-manager-558b7bbdc8-rjvcx\" (UID: \"e1cac432-d00e-45c2-b305-ba1c781829d9\") " pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.410445 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e1cac432-d00e-45c2-b305-ba1c781829d9-client-ca\") pod \"controller-manager-558b7bbdc8-rjvcx\" (UID: \"e1cac432-d00e-45c2-b305-ba1c781829d9\") " 
pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.410483 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1cac432-d00e-45c2-b305-ba1c781829d9-config\") pod \"controller-manager-558b7bbdc8-rjvcx\" (UID: \"e1cac432-d00e-45c2-b305-ba1c781829d9\") " pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.410513 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7nvh\" (UniqueName: \"kubernetes.io/projected/e1cac432-d00e-45c2-b305-ba1c781829d9-kube-api-access-r7nvh\") pod \"controller-manager-558b7bbdc8-rjvcx\" (UID: \"e1cac432-d00e-45c2-b305-ba1c781829d9\") " pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.410544 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e1cac432-d00e-45c2-b305-ba1c781829d9-proxy-ca-bundles\") pod \"controller-manager-558b7bbdc8-rjvcx\" (UID: \"e1cac432-d00e-45c2-b305-ba1c781829d9\") " pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.512116 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1cac432-d00e-45c2-b305-ba1c781829d9-serving-cert\") pod \"controller-manager-558b7bbdc8-rjvcx\" (UID: \"e1cac432-d00e-45c2-b305-ba1c781829d9\") " pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.512637 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e1cac432-d00e-45c2-b305-ba1c781829d9-client-ca\") pod \"controller-manager-558b7bbdc8-rjvcx\" (UID: \"e1cac432-d00e-45c2-b305-ba1c781829d9\") " pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.512842 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1cac432-d00e-45c2-b305-ba1c781829d9-config\") pod \"controller-manager-558b7bbdc8-rjvcx\" (UID: \"e1cac432-d00e-45c2-b305-ba1c781829d9\") " pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.513041 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7nvh\" (UniqueName: \"kubernetes.io/projected/e1cac432-d00e-45c2-b305-ba1c781829d9-kube-api-access-r7nvh\") pod \"controller-manager-558b7bbdc8-rjvcx\" (UID: \"e1cac432-d00e-45c2-b305-ba1c781829d9\") " pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.513631 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e1cac432-d00e-45c2-b305-ba1c781829d9-proxy-ca-bundles\") pod \"controller-manager-558b7bbdc8-rjvcx\" (UID: \"e1cac432-d00e-45c2-b305-ba1c781829d9\") " pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.514200 
4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1cac432-d00e-45c2-b305-ba1c781829d9-config\") pod \"controller-manager-558b7bbdc8-rjvcx\" (UID: \"e1cac432-d00e-45c2-b305-ba1c781829d9\") " pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.514953 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e1cac432-d00e-45c2-b305-ba1c781829d9-client-ca\") pod \"controller-manager-558b7bbdc8-rjvcx\" (UID: \"e1cac432-d00e-45c2-b305-ba1c781829d9\") " pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.515267 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e1cac432-d00e-45c2-b305-ba1c781829d9-proxy-ca-bundles\") pod \"controller-manager-558b7bbdc8-rjvcx\" (UID: \"e1cac432-d00e-45c2-b305-ba1c781829d9\") " pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.528333 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1cac432-d00e-45c2-b305-ba1c781829d9-serving-cert\") pod \"controller-manager-558b7bbdc8-rjvcx\" (UID: \"e1cac432-d00e-45c2-b305-ba1c781829d9\") " pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.530990 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7nvh\" (UniqueName: \"kubernetes.io/projected/e1cac432-d00e-45c2-b305-ba1c781829d9-kube-api-access-r7nvh\") pod \"controller-manager-558b7bbdc8-rjvcx\" (UID: \"e1cac432-d00e-45c2-b305-ba1c781829d9\") " pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.629310 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" event={"ID":"4f5ead6c-0aea-400a-a02d-4cd1fdded9c5","Type":"ContainerDied","Data":"646c59b2538142ba8a26c8a28ab011552fa4c438a31a1bc5a6bde36517a99610"} Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.630844 4747 scope.go:117] "RemoveContainer" containerID="7e18c323293e3d00441759d43378d436345e02717647aad43e6c62de9a7af647" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.631197 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.631964 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.656289 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s"] Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.660903 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-77f8s"] Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.680276 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx"] Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.724564 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5"] Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.725346 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.729254 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.729328 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.729497 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.729546 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.729680 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.729772 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.740635 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5"] Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.921872 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5h9w\" (UniqueName: \"kubernetes.io/projected/5ee13a11-577c-4f57-9e62-d81c556901d9-kube-api-access-k5h9w\") pod \"route-controller-manager-7cfd77c7c4-vm2l5\" (UID: \"5ee13a11-577c-4f57-9e62-d81c556901d9\") " pod="openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.921961 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ee13a11-577c-4f57-9e62-d81c556901d9-serving-cert\") pod \"route-controller-manager-7cfd77c7c4-vm2l5\" (UID: \"5ee13a11-577c-4f57-9e62-d81c556901d9\") " pod="openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.922006 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"client-ca\" (UniqueName: \"kubernetes.io/configmap/5ee13a11-577c-4f57-9e62-d81c556901d9-client-ca\") pod \"route-controller-manager-7cfd77c7c4-vm2l5\" (UID: \"5ee13a11-577c-4f57-9e62-d81c556901d9\") " pod="openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5" Oct 01 06:29:09 crc kubenswrapper[4747]: I1001 06:29:09.922034 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ee13a11-577c-4f57-9e62-d81c556901d9-config\") pod \"route-controller-manager-7cfd77c7c4-vm2l5\" (UID: \"5ee13a11-577c-4f57-9e62-d81c556901d9\") " pod="openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5" Oct 01 06:29:10 crc kubenswrapper[4747]: I1001 06:29:10.022887 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ee13a11-577c-4f57-9e62-d81c556901d9-serving-cert\") pod \"route-controller-manager-7cfd77c7c4-vm2l5\" (UID: \"5ee13a11-577c-4f57-9e62-d81c556901d9\") " pod="openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5" Oct 01 06:29:10 crc kubenswrapper[4747]: I1001 06:29:10.022955 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5ee13a11-577c-4f57-9e62-d81c556901d9-client-ca\") pod \"route-controller-manager-7cfd77c7c4-vm2l5\" (UID: \"5ee13a11-577c-4f57-9e62-d81c556901d9\") " pod="openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5" Oct 01 06:29:10 crc kubenswrapper[4747]: I1001 06:29:10.022984 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ee13a11-577c-4f57-9e62-d81c556901d9-config\") pod \"route-controller-manager-7cfd77c7c4-vm2l5\" (UID: \"5ee13a11-577c-4f57-9e62-d81c556901d9\") " pod="openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5" Oct 01 06:29:10 crc kubenswrapper[4747]: I1001 06:29:10.023014 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5h9w\" (UniqueName: \"kubernetes.io/projected/5ee13a11-577c-4f57-9e62-d81c556901d9-kube-api-access-k5h9w\") pod \"route-controller-manager-7cfd77c7c4-vm2l5\" (UID: \"5ee13a11-577c-4f57-9e62-d81c556901d9\") " pod="openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5" Oct 01 06:29:10 crc kubenswrapper[4747]: I1001 06:29:10.023968 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5ee13a11-577c-4f57-9e62-d81c556901d9-client-ca\") pod \"route-controller-manager-7cfd77c7c4-vm2l5\" (UID: \"5ee13a11-577c-4f57-9e62-d81c556901d9\") " pod="openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5" Oct 01 06:29:10 crc kubenswrapper[4747]: I1001 06:29:10.024673 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ee13a11-577c-4f57-9e62-d81c556901d9-config\") pod \"route-controller-manager-7cfd77c7c4-vm2l5\" (UID: \"5ee13a11-577c-4f57-9e62-d81c556901d9\") " pod="openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5" Oct 01 06:29:10 crc kubenswrapper[4747]: I1001 06:29:10.032158 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ee13a11-577c-4f57-9e62-d81c556901d9-serving-cert\") pod 
\"route-controller-manager-7cfd77c7c4-vm2l5\" (UID: \"5ee13a11-577c-4f57-9e62-d81c556901d9\") " pod="openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5" Oct 01 06:29:10 crc kubenswrapper[4747]: I1001 06:29:10.045641 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5h9w\" (UniqueName: \"kubernetes.io/projected/5ee13a11-577c-4f57-9e62-d81c556901d9-kube-api-access-k5h9w\") pod \"route-controller-manager-7cfd77c7c4-vm2l5\" (UID: \"5ee13a11-577c-4f57-9e62-d81c556901d9\") " pod="openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5" Oct 01 06:29:10 crc kubenswrapper[4747]: I1001 06:29:10.047325 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5" Oct 01 06:29:11 crc kubenswrapper[4747]: I1001 06:29:11.286862 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f5ead6c-0aea-400a-a02d-4cd1fdded9c5" path="/var/lib/kubelet/pods/4f5ead6c-0aea-400a-a02d-4cd1fdded9c5/volumes" Oct 01 06:29:11 crc kubenswrapper[4747]: I1001 06:29:11.480255 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5"] Oct 01 06:29:11 crc kubenswrapper[4747]: W1001 06:29:11.488419 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5ee13a11_577c_4f57_9e62_d81c556901d9.slice/crio-67c9040ba74e569b610ae4877f51f70b4262e3bbfdcfdc970bc09da58497b514 WatchSource:0}: Error finding container 67c9040ba74e569b610ae4877f51f70b4262e3bbfdcfdc970bc09da58497b514: Status 404 returned error can't find the container with id 67c9040ba74e569b610ae4877f51f70b4262e3bbfdcfdc970bc09da58497b514 Oct 01 06:29:11 crc kubenswrapper[4747]: I1001 06:29:11.537304 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx"] Oct 01 06:29:11 crc kubenswrapper[4747]: W1001 06:29:11.550789 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode1cac432_d00e_45c2_b305_ba1c781829d9.slice/crio-f81e674dd87782c12f9d450a90f4cbfb5ac6983c868ab77d35c6ab13f649433d WatchSource:0}: Error finding container f81e674dd87782c12f9d450a90f4cbfb5ac6983c868ab77d35c6ab13f649433d: Status 404 returned error can't find the container with id f81e674dd87782c12f9d450a90f4cbfb5ac6983c868ab77d35c6ab13f649433d Oct 01 06:29:11 crc kubenswrapper[4747]: I1001 06:29:11.652953 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" event={"ID":"e1cac432-d00e-45c2-b305-ba1c781829d9","Type":"ContainerStarted","Data":"f81e674dd87782c12f9d450a90f4cbfb5ac6983c868ab77d35c6ab13f649433d"} Oct 01 06:29:11 crc kubenswrapper[4747]: I1001 06:29:11.654336 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-74c5fcf84b-mgdfk" event={"ID":"3a7cc65a-645c-4533-b334-5f003e1d8382","Type":"ContainerStarted","Data":"80a6cf4dd8cd716a1670613ddfec332f6cce01c2b2ec00a35efe4ac8ea80e81d"} Oct 01 06:29:11 crc kubenswrapper[4747]: I1001 06:29:11.655425 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5" 
event={"ID":"5ee13a11-577c-4f57-9e62-d81c556901d9","Type":"ContainerStarted","Data":"67c9040ba74e569b610ae4877f51f70b4262e3bbfdcfdc970bc09da58497b514"} Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.120005 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-56n9r"] Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.121302 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-56n9r" Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.132775 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-56n9r"] Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.258278 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c242ec4-4379-47ff-9e7d-aadf904daf58-catalog-content\") pod \"community-operators-56n9r\" (UID: \"6c242ec4-4379-47ff-9e7d-aadf904daf58\") " pod="openshift-marketplace/community-operators-56n9r" Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.258524 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c242ec4-4379-47ff-9e7d-aadf904daf58-utilities\") pod \"community-operators-56n9r\" (UID: \"6c242ec4-4379-47ff-9e7d-aadf904daf58\") " pod="openshift-marketplace/community-operators-56n9r" Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.258590 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwn6r\" (UniqueName: \"kubernetes.io/projected/6c242ec4-4379-47ff-9e7d-aadf904daf58-kube-api-access-vwn6r\") pod \"community-operators-56n9r\" (UID: \"6c242ec4-4379-47ff-9e7d-aadf904daf58\") " pod="openshift-marketplace/community-operators-56n9r" Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.360341 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c242ec4-4379-47ff-9e7d-aadf904daf58-utilities\") pod \"community-operators-56n9r\" (UID: \"6c242ec4-4379-47ff-9e7d-aadf904daf58\") " pod="openshift-marketplace/community-operators-56n9r" Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.360395 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwn6r\" (UniqueName: \"kubernetes.io/projected/6c242ec4-4379-47ff-9e7d-aadf904daf58-kube-api-access-vwn6r\") pod \"community-operators-56n9r\" (UID: \"6c242ec4-4379-47ff-9e7d-aadf904daf58\") " pod="openshift-marketplace/community-operators-56n9r" Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.360493 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c242ec4-4379-47ff-9e7d-aadf904daf58-catalog-content\") pod \"community-operators-56n9r\" (UID: \"6c242ec4-4379-47ff-9e7d-aadf904daf58\") " pod="openshift-marketplace/community-operators-56n9r" Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.361097 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c242ec4-4379-47ff-9e7d-aadf904daf58-utilities\") pod \"community-operators-56n9r\" (UID: \"6c242ec4-4379-47ff-9e7d-aadf904daf58\") " pod="openshift-marketplace/community-operators-56n9r" Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 
06:29:12.361186 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c242ec4-4379-47ff-9e7d-aadf904daf58-catalog-content\") pod \"community-operators-56n9r\" (UID: \"6c242ec4-4379-47ff-9e7d-aadf904daf58\") " pod="openshift-marketplace/community-operators-56n9r" Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.386201 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwn6r\" (UniqueName: \"kubernetes.io/projected/6c242ec4-4379-47ff-9e7d-aadf904daf58-kube-api-access-vwn6r\") pod \"community-operators-56n9r\" (UID: \"6c242ec4-4379-47ff-9e7d-aadf904daf58\") " pod="openshift-marketplace/community-operators-56n9r" Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.441072 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-56n9r" Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.663958 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5" event={"ID":"5ee13a11-577c-4f57-9e62-d81c556901d9","Type":"ContainerStarted","Data":"6eaf18252d7f94d78884ff7571e7af8279086d8395ddfb1f878e23caa03752e1"} Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.664314 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5" Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.669080 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" event={"ID":"e1cac432-d00e-45c2-b305-ba1c781829d9","Type":"ContainerStarted","Data":"bd66957379d17d344885a55e12de268a8bf6aec73b188db0b2eda72f49aed926"} Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.669795 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" podUID="e1cac432-d00e-45c2-b305-ba1c781829d9" containerName="controller-manager" containerID="cri-o://bd66957379d17d344885a55e12de268a8bf6aec73b188db0b2eda72f49aed926" gracePeriod=30 Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.670399 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.679869 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5" Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.689078 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7cfd77c7c4-vm2l5" podStartSLOduration=3.689062867 podStartE2EDuration="3.689062867s" podCreationTimestamp="2025-10-01 06:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:29:12.689047467 +0000 UTC m=+754.098704516" watchObservedRunningTime="2025-10-01 06:29:12.689062867 +0000 UTC m=+754.098719916" Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.713616 4747 patch_prober.go:28] interesting pod/controller-manager-558b7bbdc8-rjvcx container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get 
\"https://10.217.0.49:8443/healthz\": read tcp 10.217.0.2:39814->10.217.0.49:8443: read: connection reset by peer" start-of-body= Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.713653 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" podUID="e1cac432-d00e-45c2-b305-ba1c781829d9" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.49:8443/healthz\": read tcp 10.217.0.2:39814->10.217.0.49:8443: read: connection reset by peer" Oct 01 06:29:12 crc kubenswrapper[4747]: I1001 06:29:12.733103 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" podStartSLOduration=4.733089035 podStartE2EDuration="4.733089035s" podCreationTimestamp="2025-10-01 06:29:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:29:12.73017479 +0000 UTC m=+754.139831829" watchObservedRunningTime="2025-10-01 06:29:12.733089035 +0000 UTC m=+754.142746084" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.346439 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.371643 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5f846485c8-xw2n9"] Oct 01 06:29:13 crc kubenswrapper[4747]: E1001 06:29:13.372011 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1cac432-d00e-45c2-b305-ba1c781829d9" containerName="controller-manager" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.372024 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1cac432-d00e-45c2-b305-ba1c781829d9" containerName="controller-manager" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.392972 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1cac432-d00e-45c2-b305-ba1c781829d9-serving-cert\") pod \"e1cac432-d00e-45c2-b305-ba1c781829d9\" (UID: \"e1cac432-d00e-45c2-b305-ba1c781829d9\") " Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.393018 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e1cac432-d00e-45c2-b305-ba1c781829d9-client-ca\") pod \"e1cac432-d00e-45c2-b305-ba1c781829d9\" (UID: \"e1cac432-d00e-45c2-b305-ba1c781829d9\") " Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.393045 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e1cac432-d00e-45c2-b305-ba1c781829d9-proxy-ca-bundles\") pod \"e1cac432-d00e-45c2-b305-ba1c781829d9\" (UID: \"e1cac432-d00e-45c2-b305-ba1c781829d9\") " Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.393079 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1cac432-d00e-45c2-b305-ba1c781829d9-config\") pod \"e1cac432-d00e-45c2-b305-ba1c781829d9\" (UID: \"e1cac432-d00e-45c2-b305-ba1c781829d9\") " Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.393108 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7nvh\" (UniqueName: 
\"kubernetes.io/projected/e1cac432-d00e-45c2-b305-ba1c781829d9-kube-api-access-r7nvh\") pod \"e1cac432-d00e-45c2-b305-ba1c781829d9\" (UID: \"e1cac432-d00e-45c2-b305-ba1c781829d9\") " Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.398813 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1cac432-d00e-45c2-b305-ba1c781829d9-kube-api-access-r7nvh" (OuterVolumeSpecName: "kube-api-access-r7nvh") pod "e1cac432-d00e-45c2-b305-ba1c781829d9" (UID: "e1cac432-d00e-45c2-b305-ba1c781829d9"). InnerVolumeSpecName "kube-api-access-r7nvh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.401249 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1cac432-d00e-45c2-b305-ba1c781829d9" containerName="controller-manager" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.401321 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1cac432-d00e-45c2-b305-ba1c781829d9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e1cac432-d00e-45c2-b305-ba1c781829d9" (UID: "e1cac432-d00e-45c2-b305-ba1c781829d9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.401854 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1cac432-d00e-45c2-b305-ba1c781829d9-client-ca" (OuterVolumeSpecName: "client-ca") pod "e1cac432-d00e-45c2-b305-ba1c781829d9" (UID: "e1cac432-d00e-45c2-b305-ba1c781829d9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.401880 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.402337 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1cac432-d00e-45c2-b305-ba1c781829d9-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "e1cac432-d00e-45c2-b305-ba1c781829d9" (UID: "e1cac432-d00e-45c2-b305-ba1c781829d9"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.402571 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1cac432-d00e-45c2-b305-ba1c781829d9-config" (OuterVolumeSpecName: "config") pod "e1cac432-d00e-45c2-b305-ba1c781829d9" (UID: "e1cac432-d00e-45c2-b305-ba1c781829d9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.411557 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5f846485c8-xw2n9"] Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.494051 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2668f94b-b843-4bbc-be94-1d12ee878680-config\") pod \"controller-manager-5f846485c8-xw2n9\" (UID: \"2668f94b-b843-4bbc-be94-1d12ee878680\") " pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.494139 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fw5kd\" (UniqueName: \"kubernetes.io/projected/2668f94b-b843-4bbc-be94-1d12ee878680-kube-api-access-fw5kd\") pod \"controller-manager-5f846485c8-xw2n9\" (UID: \"2668f94b-b843-4bbc-be94-1d12ee878680\") " pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.494173 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2668f94b-b843-4bbc-be94-1d12ee878680-proxy-ca-bundles\") pod \"controller-manager-5f846485c8-xw2n9\" (UID: \"2668f94b-b843-4bbc-be94-1d12ee878680\") " pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.494242 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2668f94b-b843-4bbc-be94-1d12ee878680-serving-cert\") pod \"controller-manager-5f846485c8-xw2n9\" (UID: \"2668f94b-b843-4bbc-be94-1d12ee878680\") " pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.494294 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2668f94b-b843-4bbc-be94-1d12ee878680-client-ca\") pod \"controller-manager-5f846485c8-xw2n9\" (UID: \"2668f94b-b843-4bbc-be94-1d12ee878680\") " pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.494328 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7nvh\" (UniqueName: \"kubernetes.io/projected/e1cac432-d00e-45c2-b305-ba1c781829d9-kube-api-access-r7nvh\") on node \"crc\" DevicePath \"\"" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.494340 4747 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e1cac432-d00e-45c2-b305-ba1c781829d9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.494350 4747 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e1cac432-d00e-45c2-b305-ba1c781829d9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.494357 4747 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e1cac432-d00e-45c2-b305-ba1c781829d9-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 01 06:29:13 crc kubenswrapper[4747]: 
I1001 06:29:13.494365 4747 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1cac432-d00e-45c2-b305-ba1c781829d9-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.595115 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2668f94b-b843-4bbc-be94-1d12ee878680-proxy-ca-bundles\") pod \"controller-manager-5f846485c8-xw2n9\" (UID: \"2668f94b-b843-4bbc-be94-1d12ee878680\") " pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.595176 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2668f94b-b843-4bbc-be94-1d12ee878680-serving-cert\") pod \"controller-manager-5f846485c8-xw2n9\" (UID: \"2668f94b-b843-4bbc-be94-1d12ee878680\") " pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.595228 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2668f94b-b843-4bbc-be94-1d12ee878680-client-ca\") pod \"controller-manager-5f846485c8-xw2n9\" (UID: \"2668f94b-b843-4bbc-be94-1d12ee878680\") " pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.595249 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2668f94b-b843-4bbc-be94-1d12ee878680-config\") pod \"controller-manager-5f846485c8-xw2n9\" (UID: \"2668f94b-b843-4bbc-be94-1d12ee878680\") " pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.595288 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fw5kd\" (UniqueName: \"kubernetes.io/projected/2668f94b-b843-4bbc-be94-1d12ee878680-kube-api-access-fw5kd\") pod \"controller-manager-5f846485c8-xw2n9\" (UID: \"2668f94b-b843-4bbc-be94-1d12ee878680\") " pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.596122 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2668f94b-b843-4bbc-be94-1d12ee878680-client-ca\") pod \"controller-manager-5f846485c8-xw2n9\" (UID: \"2668f94b-b843-4bbc-be94-1d12ee878680\") " pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.596947 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2668f94b-b843-4bbc-be94-1d12ee878680-config\") pod \"controller-manager-5f846485c8-xw2n9\" (UID: \"2668f94b-b843-4bbc-be94-1d12ee878680\") " pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.597830 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2668f94b-b843-4bbc-be94-1d12ee878680-proxy-ca-bundles\") pod \"controller-manager-5f846485c8-xw2n9\" (UID: \"2668f94b-b843-4bbc-be94-1d12ee878680\") " pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" Oct 01 06:29:13 crc 
kubenswrapper[4747]: I1001 06:29:13.599307 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2668f94b-b843-4bbc-be94-1d12ee878680-serving-cert\") pod \"controller-manager-5f846485c8-xw2n9\" (UID: \"2668f94b-b843-4bbc-be94-1d12ee878680\") " pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.611483 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fw5kd\" (UniqueName: \"kubernetes.io/projected/2668f94b-b843-4bbc-be94-1d12ee878680-kube-api-access-fw5kd\") pod \"controller-manager-5f846485c8-xw2n9\" (UID: \"2668f94b-b843-4bbc-be94-1d12ee878680\") " pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.676068 4747 generic.go:334] "Generic (PLEG): container finished" podID="e1cac432-d00e-45c2-b305-ba1c781829d9" containerID="bd66957379d17d344885a55e12de268a8bf6aec73b188db0b2eda72f49aed926" exitCode=0 Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.676129 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.676179 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" event={"ID":"e1cac432-d00e-45c2-b305-ba1c781829d9","Type":"ContainerDied","Data":"bd66957379d17d344885a55e12de268a8bf6aec73b188db0b2eda72f49aed926"} Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.678062 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx" event={"ID":"e1cac432-d00e-45c2-b305-ba1c781829d9","Type":"ContainerDied","Data":"f81e674dd87782c12f9d450a90f4cbfb5ac6983c868ab77d35c6ab13f649433d"} Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.678085 4747 scope.go:117] "RemoveContainer" containerID="bd66957379d17d344885a55e12de268a8bf6aec73b188db0b2eda72f49aed926" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.711986 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-56n9r"] Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.715181 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx"] Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.720586 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-558b7bbdc8-rjvcx"] Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.753944 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" Oct 01 06:29:13 crc kubenswrapper[4747]: W1001 06:29:13.902266 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c242ec4_4379_47ff_9e7d_aadf904daf58.slice/crio-1d98aa78277e17c77cc7915c6ec95ce7705d675c51a9db0ede333e7f3601410d WatchSource:0}: Error finding container 1d98aa78277e17c77cc7915c6ec95ce7705d675c51a9db0ede333e7f3601410d: Status 404 returned error can't find the container with id 1d98aa78277e17c77cc7915c6ec95ce7705d675c51a9db0ede333e7f3601410d Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.911951 4747 scope.go:117] "RemoveContainer" containerID="bd66957379d17d344885a55e12de268a8bf6aec73b188db0b2eda72f49aed926" Oct 01 06:29:13 crc kubenswrapper[4747]: E1001 06:29:13.912491 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd66957379d17d344885a55e12de268a8bf6aec73b188db0b2eda72f49aed926\": container with ID starting with bd66957379d17d344885a55e12de268a8bf6aec73b188db0b2eda72f49aed926 not found: ID does not exist" containerID="bd66957379d17d344885a55e12de268a8bf6aec73b188db0b2eda72f49aed926" Oct 01 06:29:13 crc kubenswrapper[4747]: I1001 06:29:13.912532 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd66957379d17d344885a55e12de268a8bf6aec73b188db0b2eda72f49aed926"} err="failed to get container status \"bd66957379d17d344885a55e12de268a8bf6aec73b188db0b2eda72f49aed926\": rpc error: code = NotFound desc = could not find container \"bd66957379d17d344885a55e12de268a8bf6aec73b188db0b2eda72f49aed926\": container with ID starting with bd66957379d17d344885a55e12de268a8bf6aec73b188db0b2eda72f49aed926 not found: ID does not exist" Oct 01 06:29:14 crc kubenswrapper[4747]: I1001 06:29:14.185833 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5f846485c8-xw2n9"] Oct 01 06:29:14 crc kubenswrapper[4747]: W1001 06:29:14.193174 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2668f94b_b843_4bbc_be94_1d12ee878680.slice/crio-1e456c5f6b6f4a35f861af1d7479e6baf858daa4363070c5ed06b95d8a635090 WatchSource:0}: Error finding container 1e456c5f6b6f4a35f861af1d7479e6baf858daa4363070c5ed06b95d8a635090: Status 404 returned error can't find the container with id 1e456c5f6b6f4a35f861af1d7479e6baf858daa4363070c5ed06b95d8a635090 Oct 01 06:29:14 crc kubenswrapper[4747]: I1001 06:29:14.682040 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-74c5fcf84b-mgdfk" event={"ID":"3a7cc65a-645c-4533-b334-5f003e1d8382","Type":"ContainerStarted","Data":"46a548220df8e8c6693ccbc26c7780585c46513e7131218237a758232653f0b5"} Oct 01 06:29:14 crc kubenswrapper[4747]: I1001 06:29:14.682472 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-74c5fcf84b-mgdfk" Oct 01 06:29:14 crc kubenswrapper[4747]: I1001 06:29:14.684844 4747 generic.go:334] "Generic (PLEG): container finished" podID="6c242ec4-4379-47ff-9e7d-aadf904daf58" containerID="ffa4870636538fb57b4f26bf56b6eb1303699a2f2b9b1c9ee3d34547716306d8" exitCode=0 Oct 01 06:29:14 crc kubenswrapper[4747]: I1001 06:29:14.684909 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-56n9r" event={"ID":"6c242ec4-4379-47ff-9e7d-aadf904daf58","Type":"ContainerDied","Data":"ffa4870636538fb57b4f26bf56b6eb1303699a2f2b9b1c9ee3d34547716306d8"} Oct 01 06:29:14 crc kubenswrapper[4747]: I1001 06:29:14.684925 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-56n9r" event={"ID":"6c242ec4-4379-47ff-9e7d-aadf904daf58","Type":"ContainerStarted","Data":"1d98aa78277e17c77cc7915c6ec95ce7705d675c51a9db0ede333e7f3601410d"} Oct 01 06:29:14 crc kubenswrapper[4747]: I1001 06:29:14.691702 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" event={"ID":"2668f94b-b843-4bbc-be94-1d12ee878680","Type":"ContainerStarted","Data":"b39d2b6767e433996597217aac467231d5c88e8fc11fb9ff4d983e5d9b9501df"} Oct 01 06:29:14 crc kubenswrapper[4747]: I1001 06:29:14.691733 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" Oct 01 06:29:14 crc kubenswrapper[4747]: I1001 06:29:14.691745 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" event={"ID":"2668f94b-b843-4bbc-be94-1d12ee878680","Type":"ContainerStarted","Data":"1e456c5f6b6f4a35f861af1d7479e6baf858daa4363070c5ed06b95d8a635090"} Oct 01 06:29:14 crc kubenswrapper[4747]: I1001 06:29:14.696216 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" Oct 01 06:29:14 crc kubenswrapper[4747]: I1001 06:29:14.701973 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-74c5fcf84b-mgdfk" podStartSLOduration=2.152112904 podStartE2EDuration="8.701952274s" podCreationTimestamp="2025-10-01 06:29:06 +0000 UTC" firstStartedPulling="2025-10-01 06:29:07.412360639 +0000 UTC m=+748.822017688" lastFinishedPulling="2025-10-01 06:29:13.962200009 +0000 UTC m=+755.371857058" observedRunningTime="2025-10-01 06:29:14.699806249 +0000 UTC m=+756.109463288" watchObservedRunningTime="2025-10-01 06:29:14.701952274 +0000 UTC m=+756.111609323" Oct 01 06:29:14 crc kubenswrapper[4747]: I1001 06:29:14.738454 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5f846485c8-xw2n9" podStartSLOduration=5.738437788 podStartE2EDuration="5.738437788s" podCreationTimestamp="2025-10-01 06:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:29:14.735883843 +0000 UTC m=+756.145540892" watchObservedRunningTime="2025-10-01 06:29:14.738437788 +0000 UTC m=+756.148094837" Oct 01 06:29:15 crc kubenswrapper[4747]: I1001 06:29:15.136000 4747 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 01 06:29:15 crc kubenswrapper[4747]: I1001 06:29:15.284448 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1cac432-d00e-45c2-b305-ba1c781829d9" path="/var/lib/kubelet/pods/e1cac432-d00e-45c2-b305-ba1c781829d9/volumes" Oct 01 06:29:15 crc kubenswrapper[4747]: I1001 06:29:15.699436 4747 generic.go:334] "Generic (PLEG): container finished" podID="6c242ec4-4379-47ff-9e7d-aadf904daf58" 
containerID="874606643d8bee1ded96b8a5433e6f67781043f7de215ab75d3fd0768f73d7a2" exitCode=0 Oct 01 06:29:15 crc kubenswrapper[4747]: I1001 06:29:15.699497 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-56n9r" event={"ID":"6c242ec4-4379-47ff-9e7d-aadf904daf58","Type":"ContainerDied","Data":"874606643d8bee1ded96b8a5433e6f67781043f7de215ab75d3fd0768f73d7a2"} Oct 01 06:29:16 crc kubenswrapper[4747]: I1001 06:29:16.710405 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-56n9r" event={"ID":"6c242ec4-4379-47ff-9e7d-aadf904daf58","Type":"ContainerStarted","Data":"06ecf259f43545e72d5c90288b90be71531c2693149164b9974dfc59cbaa0146"} Oct 01 06:29:16 crc kubenswrapper[4747]: I1001 06:29:16.737354 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-56n9r" podStartSLOduration=2.909054271 podStartE2EDuration="4.737332257s" podCreationTimestamp="2025-10-01 06:29:12 +0000 UTC" firstStartedPulling="2025-10-01 06:29:14.685698978 +0000 UTC m=+756.095356027" lastFinishedPulling="2025-10-01 06:29:16.513976964 +0000 UTC m=+757.923634013" observedRunningTime="2025-10-01 06:29:16.73040606 +0000 UTC m=+758.140063139" watchObservedRunningTime="2025-10-01 06:29:16.737332257 +0000 UTC m=+758.146989326" Oct 01 06:29:17 crc kubenswrapper[4747]: I1001 06:29:17.107924 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-74c5fcf84b-mgdfk" Oct 01 06:29:22 crc kubenswrapper[4747]: I1001 06:29:22.108397 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-tgczc"] Oct 01 06:29:22 crc kubenswrapper[4747]: I1001 06:29:22.109902 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-tgczc" Oct 01 06:29:22 crc kubenswrapper[4747]: I1001 06:29:22.112769 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-index-dockercfg-lk7c2" Oct 01 06:29:22 crc kubenswrapper[4747]: I1001 06:29:22.127603 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-tgczc"] Oct 01 06:29:22 crc kubenswrapper[4747]: I1001 06:29:22.308249 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxx4g\" (UniqueName: \"kubernetes.io/projected/ec39777a-4bcc-43b6-976a-80c654371512-kube-api-access-nxx4g\") pod \"infra-operator-index-tgczc\" (UID: \"ec39777a-4bcc-43b6-976a-80c654371512\") " pod="openstack-operators/infra-operator-index-tgczc" Oct 01 06:29:22 crc kubenswrapper[4747]: I1001 06:29:22.410560 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxx4g\" (UniqueName: \"kubernetes.io/projected/ec39777a-4bcc-43b6-976a-80c654371512-kube-api-access-nxx4g\") pod \"infra-operator-index-tgczc\" (UID: \"ec39777a-4bcc-43b6-976a-80c654371512\") " pod="openstack-operators/infra-operator-index-tgczc" Oct 01 06:29:22 crc kubenswrapper[4747]: I1001 06:29:22.442532 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxx4g\" (UniqueName: \"kubernetes.io/projected/ec39777a-4bcc-43b6-976a-80c654371512-kube-api-access-nxx4g\") pod \"infra-operator-index-tgczc\" (UID: \"ec39777a-4bcc-43b6-976a-80c654371512\") " pod="openstack-operators/infra-operator-index-tgczc" Oct 01 06:29:22 crc kubenswrapper[4747]: I1001 06:29:22.442608 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-56n9r" Oct 01 06:29:22 crc kubenswrapper[4747]: I1001 06:29:22.443362 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-56n9r" Oct 01 06:29:22 crc kubenswrapper[4747]: I1001 06:29:22.446445 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-tgczc" Oct 01 06:29:22 crc kubenswrapper[4747]: I1001 06:29:22.516348 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-56n9r" Oct 01 06:29:22 crc kubenswrapper[4747]: W1001 06:29:22.768629 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podec39777a_4bcc_43b6_976a_80c654371512.slice/crio-bdbb36545871fdfb892ba9a264520145a6b0a6c655380f273bbd1868bebe63be WatchSource:0}: Error finding container bdbb36545871fdfb892ba9a264520145a6b0a6c655380f273bbd1868bebe63be: Status 404 returned error can't find the container with id bdbb36545871fdfb892ba9a264520145a6b0a6c655380f273bbd1868bebe63be Oct 01 06:29:22 crc kubenswrapper[4747]: I1001 06:29:22.771425 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-tgczc"] Oct 01 06:29:22 crc kubenswrapper[4747]: I1001 06:29:22.801500 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-56n9r" Oct 01 06:29:23 crc kubenswrapper[4747]: I1001 06:29:23.767178 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-tgczc" event={"ID":"ec39777a-4bcc-43b6-976a-80c654371512","Type":"ContainerStarted","Data":"bdbb36545871fdfb892ba9a264520145a6b0a6c655380f273bbd1868bebe63be"} Oct 01 06:29:24 crc kubenswrapper[4747]: I1001 06:29:24.772837 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-tgczc" event={"ID":"ec39777a-4bcc-43b6-976a-80c654371512","Type":"ContainerStarted","Data":"7164f5fcf895ffb6ce2a11ca442b9d04970743101cbffdd506bb7d9a06fea1be"} Oct 01 06:29:24 crc kubenswrapper[4747]: I1001 06:29:24.787812 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-tgczc" podStartSLOduration=1.851369225 podStartE2EDuration="2.787797709s" podCreationTimestamp="2025-10-01 06:29:22 +0000 UTC" firstStartedPulling="2025-10-01 06:29:22.771382851 +0000 UTC m=+764.181039910" lastFinishedPulling="2025-10-01 06:29:23.707811335 +0000 UTC m=+765.117468394" observedRunningTime="2025-10-01 06:29:24.785872529 +0000 UTC m=+766.195529578" watchObservedRunningTime="2025-10-01 06:29:24.787797709 +0000 UTC m=+766.197454758" Oct 01 06:29:27 crc kubenswrapper[4747]: I1001 06:29:27.305211 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-56n9r"] Oct 01 06:29:27 crc kubenswrapper[4747]: I1001 06:29:27.306473 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-56n9r" podUID="6c242ec4-4379-47ff-9e7d-aadf904daf58" containerName="registry-server" containerID="cri-o://06ecf259f43545e72d5c90288b90be71531c2693149164b9974dfc59cbaa0146" gracePeriod=2 Oct 01 06:29:27 crc kubenswrapper[4747]: I1001 06:29:27.502403 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-tgczc"] Oct 01 06:29:27 crc kubenswrapper[4747]: I1001 06:29:27.502745 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-index-tgczc" podUID="ec39777a-4bcc-43b6-976a-80c654371512" containerName="registry-server" containerID="cri-o://7164f5fcf895ffb6ce2a11ca442b9d04970743101cbffdd506bb7d9a06fea1be" gracePeriod=2 Oct 01 06:29:27 crc 
kubenswrapper[4747]: I1001 06:29:27.800540 4747 generic.go:334] "Generic (PLEG): container finished" podID="ec39777a-4bcc-43b6-976a-80c654371512" containerID="7164f5fcf895ffb6ce2a11ca442b9d04970743101cbffdd506bb7d9a06fea1be" exitCode=0 Oct 01 06:29:27 crc kubenswrapper[4747]: I1001 06:29:27.800613 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-tgczc" event={"ID":"ec39777a-4bcc-43b6-976a-80c654371512","Type":"ContainerDied","Data":"7164f5fcf895ffb6ce2a11ca442b9d04970743101cbffdd506bb7d9a06fea1be"} Oct 01 06:29:27 crc kubenswrapper[4747]: I1001 06:29:27.804347 4747 generic.go:334] "Generic (PLEG): container finished" podID="6c242ec4-4379-47ff-9e7d-aadf904daf58" containerID="06ecf259f43545e72d5c90288b90be71531c2693149164b9974dfc59cbaa0146" exitCode=0 Oct 01 06:29:27 crc kubenswrapper[4747]: I1001 06:29:27.804394 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-56n9r" event={"ID":"6c242ec4-4379-47ff-9e7d-aadf904daf58","Type":"ContainerDied","Data":"06ecf259f43545e72d5c90288b90be71531c2693149164b9974dfc59cbaa0146"} Oct 01 06:29:27 crc kubenswrapper[4747]: I1001 06:29:27.850053 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-56n9r" Oct 01 06:29:27 crc kubenswrapper[4747]: I1001 06:29:27.991866 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwn6r\" (UniqueName: \"kubernetes.io/projected/6c242ec4-4379-47ff-9e7d-aadf904daf58-kube-api-access-vwn6r\") pod \"6c242ec4-4379-47ff-9e7d-aadf904daf58\" (UID: \"6c242ec4-4379-47ff-9e7d-aadf904daf58\") " Oct 01 06:29:27 crc kubenswrapper[4747]: I1001 06:29:27.991999 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c242ec4-4379-47ff-9e7d-aadf904daf58-catalog-content\") pod \"6c242ec4-4379-47ff-9e7d-aadf904daf58\" (UID: \"6c242ec4-4379-47ff-9e7d-aadf904daf58\") " Oct 01 06:29:27 crc kubenswrapper[4747]: I1001 06:29:27.992031 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c242ec4-4379-47ff-9e7d-aadf904daf58-utilities\") pod \"6c242ec4-4379-47ff-9e7d-aadf904daf58\" (UID: \"6c242ec4-4379-47ff-9e7d-aadf904daf58\") " Oct 01 06:29:27 crc kubenswrapper[4747]: I1001 06:29:27.992813 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c242ec4-4379-47ff-9e7d-aadf904daf58-utilities" (OuterVolumeSpecName: "utilities") pod "6c242ec4-4379-47ff-9e7d-aadf904daf58" (UID: "6c242ec4-4379-47ff-9e7d-aadf904daf58"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.001690 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c242ec4-4379-47ff-9e7d-aadf904daf58-kube-api-access-vwn6r" (OuterVolumeSpecName: "kube-api-access-vwn6r") pod "6c242ec4-4379-47ff-9e7d-aadf904daf58" (UID: "6c242ec4-4379-47ff-9e7d-aadf904daf58"). InnerVolumeSpecName "kube-api-access-vwn6r". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.033161 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-tgczc" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.036485 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c242ec4-4379-47ff-9e7d-aadf904daf58-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6c242ec4-4379-47ff-9e7d-aadf904daf58" (UID: "6c242ec4-4379-47ff-9e7d-aadf904daf58"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.094066 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c242ec4-4379-47ff-9e7d-aadf904daf58-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.094105 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c242ec4-4379-47ff-9e7d-aadf904daf58-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.094119 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwn6r\" (UniqueName: \"kubernetes.io/projected/6c242ec4-4379-47ff-9e7d-aadf904daf58-kube-api-access-vwn6r\") on node \"crc\" DevicePath \"\"" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.195419 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nxx4g\" (UniqueName: \"kubernetes.io/projected/ec39777a-4bcc-43b6-976a-80c654371512-kube-api-access-nxx4g\") pod \"ec39777a-4bcc-43b6-976a-80c654371512\" (UID: \"ec39777a-4bcc-43b6-976a-80c654371512\") " Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.200120 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec39777a-4bcc-43b6-976a-80c654371512-kube-api-access-nxx4g" (OuterVolumeSpecName: "kube-api-access-nxx4g") pod "ec39777a-4bcc-43b6-976a-80c654371512" (UID: "ec39777a-4bcc-43b6-976a-80c654371512"). InnerVolumeSpecName "kube-api-access-nxx4g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.296675 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nxx4g\" (UniqueName: \"kubernetes.io/projected/ec39777a-4bcc-43b6-976a-80c654371512-kube-api-access-nxx4g\") on node \"crc\" DevicePath \"\"" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.314492 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-ftl54"] Oct 01 06:29:28 crc kubenswrapper[4747]: E1001 06:29:28.314856 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c242ec4-4379-47ff-9e7d-aadf904daf58" containerName="registry-server" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.314879 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c242ec4-4379-47ff-9e7d-aadf904daf58" containerName="registry-server" Oct 01 06:29:28 crc kubenswrapper[4747]: E1001 06:29:28.314904 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c242ec4-4379-47ff-9e7d-aadf904daf58" containerName="extract-content" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.314916 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c242ec4-4379-47ff-9e7d-aadf904daf58" containerName="extract-content" Oct 01 06:29:28 crc kubenswrapper[4747]: E1001 06:29:28.314941 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c242ec4-4379-47ff-9e7d-aadf904daf58" containerName="extract-utilities" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.314952 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c242ec4-4379-47ff-9e7d-aadf904daf58" containerName="extract-utilities" Oct 01 06:29:28 crc kubenswrapper[4747]: E1001 06:29:28.314970 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec39777a-4bcc-43b6-976a-80c654371512" containerName="registry-server" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.314983 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec39777a-4bcc-43b6-976a-80c654371512" containerName="registry-server" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.315193 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec39777a-4bcc-43b6-976a-80c654371512" containerName="registry-server" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.315224 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c242ec4-4379-47ff-9e7d-aadf904daf58" containerName="registry-server" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.316087 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-ftl54" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.338830 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-ftl54"] Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.499603 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxpmf\" (UniqueName: \"kubernetes.io/projected/8482a7f4-08c7-4bc5-bef0-b44cd14ca523-kube-api-access-gxpmf\") pod \"infra-operator-index-ftl54\" (UID: \"8482a7f4-08c7-4bc5-bef0-b44cd14ca523\") " pod="openstack-operators/infra-operator-index-ftl54" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.600807 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxpmf\" (UniqueName: \"kubernetes.io/projected/8482a7f4-08c7-4bc5-bef0-b44cd14ca523-kube-api-access-gxpmf\") pod \"infra-operator-index-ftl54\" (UID: \"8482a7f4-08c7-4bc5-bef0-b44cd14ca523\") " pod="openstack-operators/infra-operator-index-ftl54" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.623738 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxpmf\" (UniqueName: \"kubernetes.io/projected/8482a7f4-08c7-4bc5-bef0-b44cd14ca523-kube-api-access-gxpmf\") pod \"infra-operator-index-ftl54\" (UID: \"8482a7f4-08c7-4bc5-bef0-b44cd14ca523\") " pod="openstack-operators/infra-operator-index-ftl54" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.652130 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-ftl54" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.815653 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-tgczc" event={"ID":"ec39777a-4bcc-43b6-976a-80c654371512","Type":"ContainerDied","Data":"bdbb36545871fdfb892ba9a264520145a6b0a6c655380f273bbd1868bebe63be"} Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.815954 4747 scope.go:117] "RemoveContainer" containerID="7164f5fcf895ffb6ce2a11ca442b9d04970743101cbffdd506bb7d9a06fea1be" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.815686 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-tgczc" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.822114 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-56n9r" event={"ID":"6c242ec4-4379-47ff-9e7d-aadf904daf58","Type":"ContainerDied","Data":"1d98aa78277e17c77cc7915c6ec95ce7705d675c51a9db0ede333e7f3601410d"} Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.822155 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-56n9r" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.856661 4747 scope.go:117] "RemoveContainer" containerID="06ecf259f43545e72d5c90288b90be71531c2693149164b9974dfc59cbaa0146" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.875489 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-56n9r"] Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.883440 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-56n9r"] Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.884117 4747 scope.go:117] "RemoveContainer" containerID="874606643d8bee1ded96b8a5433e6f67781043f7de215ab75d3fd0768f73d7a2" Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.902246 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-tgczc"] Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.910433 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-index-tgczc"] Oct 01 06:29:28 crc kubenswrapper[4747]: I1001 06:29:28.914643 4747 scope.go:117] "RemoveContainer" containerID="ffa4870636538fb57b4f26bf56b6eb1303699a2f2b9b1c9ee3d34547716306d8" Oct 01 06:29:29 crc kubenswrapper[4747]: I1001 06:29:29.165514 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-ftl54"] Oct 01 06:29:29 crc kubenswrapper[4747]: W1001 06:29:29.178784 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8482a7f4_08c7_4bc5_bef0_b44cd14ca523.slice/crio-af04f4c658ea14636bce8ddf14e89c55a4bd70f3be4c8682ffe9585ed32706b0 WatchSource:0}: Error finding container af04f4c658ea14636bce8ddf14e89c55a4bd70f3be4c8682ffe9585ed32706b0: Status 404 returned error can't find the container with id af04f4c658ea14636bce8ddf14e89c55a4bd70f3be4c8682ffe9585ed32706b0 Oct 01 06:29:29 crc kubenswrapper[4747]: I1001 06:29:29.289056 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c242ec4-4379-47ff-9e7d-aadf904daf58" path="/var/lib/kubelet/pods/6c242ec4-4379-47ff-9e7d-aadf904daf58/volumes" Oct 01 06:29:29 crc kubenswrapper[4747]: I1001 06:29:29.290162 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec39777a-4bcc-43b6-976a-80c654371512" path="/var/lib/kubelet/pods/ec39777a-4bcc-43b6-976a-80c654371512/volumes" Oct 01 06:29:29 crc kubenswrapper[4747]: I1001 06:29:29.835089 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-ftl54" event={"ID":"8482a7f4-08c7-4bc5-bef0-b44cd14ca523","Type":"ContainerStarted","Data":"af04f4c658ea14636bce8ddf14e89c55a4bd70f3be4c8682ffe9585ed32706b0"} Oct 01 06:29:30 crc kubenswrapper[4747]: I1001 06:29:30.845554 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-ftl54" event={"ID":"8482a7f4-08c7-4bc5-bef0-b44cd14ca523","Type":"ContainerStarted","Data":"88d5696ebff8cf9f2d1cf047769cbf693e18579a32ee2fc60928098ef4f9d40d"} Oct 01 06:29:30 crc kubenswrapper[4747]: I1001 06:29:30.865667 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-ftl54" podStartSLOduration=2.406846897 podStartE2EDuration="2.865642394s" podCreationTimestamp="2025-10-01 06:29:28 +0000 UTC" firstStartedPulling="2025-10-01 06:29:29.186069527 +0000 UTC m=+770.595726616" 
lastFinishedPulling="2025-10-01 06:29:29.644865034 +0000 UTC m=+771.054522113" observedRunningTime="2025-10-01 06:29:30.864659179 +0000 UTC m=+772.274316228" watchObservedRunningTime="2025-10-01 06:29:30.865642394 +0000 UTC m=+772.275299473" Oct 01 06:29:35 crc kubenswrapper[4747]: I1001 06:29:35.761278 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:29:35 crc kubenswrapper[4747]: I1001 06:29:35.761612 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:29:38 crc kubenswrapper[4747]: I1001 06:29:38.652837 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-index-ftl54" Oct 01 06:29:38 crc kubenswrapper[4747]: I1001 06:29:38.653200 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/infra-operator-index-ftl54" Oct 01 06:29:38 crc kubenswrapper[4747]: I1001 06:29:38.702308 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/infra-operator-index-ftl54" Oct 01 06:29:38 crc kubenswrapper[4747]: I1001 06:29:38.948334 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-index-ftl54" Oct 01 06:29:39 crc kubenswrapper[4747]: I1001 06:29:39.507221 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-l9vhd"] Oct 01 06:29:39 crc kubenswrapper[4747]: I1001 06:29:39.508277 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-l9vhd" Oct 01 06:29:39 crc kubenswrapper[4747]: I1001 06:29:39.525338 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l9vhd"] Oct 01 06:29:39 crc kubenswrapper[4747]: I1001 06:29:39.570251 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99c0681a-184c-4d71-95e7-213cfdd04960-utilities\") pod \"certified-operators-l9vhd\" (UID: \"99c0681a-184c-4d71-95e7-213cfdd04960\") " pod="openshift-marketplace/certified-operators-l9vhd" Oct 01 06:29:39 crc kubenswrapper[4747]: I1001 06:29:39.570312 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrtg7\" (UniqueName: \"kubernetes.io/projected/99c0681a-184c-4d71-95e7-213cfdd04960-kube-api-access-hrtg7\") pod \"certified-operators-l9vhd\" (UID: \"99c0681a-184c-4d71-95e7-213cfdd04960\") " pod="openshift-marketplace/certified-operators-l9vhd" Oct 01 06:29:39 crc kubenswrapper[4747]: I1001 06:29:39.570403 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99c0681a-184c-4d71-95e7-213cfdd04960-catalog-content\") pod \"certified-operators-l9vhd\" (UID: \"99c0681a-184c-4d71-95e7-213cfdd04960\") " pod="openshift-marketplace/certified-operators-l9vhd" Oct 01 06:29:39 crc kubenswrapper[4747]: I1001 06:29:39.671261 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99c0681a-184c-4d71-95e7-213cfdd04960-utilities\") pod \"certified-operators-l9vhd\" (UID: \"99c0681a-184c-4d71-95e7-213cfdd04960\") " pod="openshift-marketplace/certified-operators-l9vhd" Oct 01 06:29:39 crc kubenswrapper[4747]: I1001 06:29:39.671321 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrtg7\" (UniqueName: \"kubernetes.io/projected/99c0681a-184c-4d71-95e7-213cfdd04960-kube-api-access-hrtg7\") pod \"certified-operators-l9vhd\" (UID: \"99c0681a-184c-4d71-95e7-213cfdd04960\") " pod="openshift-marketplace/certified-operators-l9vhd" Oct 01 06:29:39 crc kubenswrapper[4747]: I1001 06:29:39.671368 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99c0681a-184c-4d71-95e7-213cfdd04960-catalog-content\") pod \"certified-operators-l9vhd\" (UID: \"99c0681a-184c-4d71-95e7-213cfdd04960\") " pod="openshift-marketplace/certified-operators-l9vhd" Oct 01 06:29:39 crc kubenswrapper[4747]: I1001 06:29:39.671734 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99c0681a-184c-4d71-95e7-213cfdd04960-utilities\") pod \"certified-operators-l9vhd\" (UID: \"99c0681a-184c-4d71-95e7-213cfdd04960\") " pod="openshift-marketplace/certified-operators-l9vhd" Oct 01 06:29:39 crc kubenswrapper[4747]: I1001 06:29:39.671788 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99c0681a-184c-4d71-95e7-213cfdd04960-catalog-content\") pod \"certified-operators-l9vhd\" (UID: \"99c0681a-184c-4d71-95e7-213cfdd04960\") " pod="openshift-marketplace/certified-operators-l9vhd" Oct 01 06:29:39 crc kubenswrapper[4747]: I1001 06:29:39.696392 4747 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hrtg7\" (UniqueName: \"kubernetes.io/projected/99c0681a-184c-4d71-95e7-213cfdd04960-kube-api-access-hrtg7\") pod \"certified-operators-l9vhd\" (UID: \"99c0681a-184c-4d71-95e7-213cfdd04960\") " pod="openshift-marketplace/certified-operators-l9vhd" Oct 01 06:29:39 crc kubenswrapper[4747]: I1001 06:29:39.827481 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l9vhd" Oct 01 06:29:40 crc kubenswrapper[4747]: I1001 06:29:40.279724 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l9vhd"] Oct 01 06:29:40 crc kubenswrapper[4747]: I1001 06:29:40.923815 4747 generic.go:334] "Generic (PLEG): container finished" podID="99c0681a-184c-4d71-95e7-213cfdd04960" containerID="5d9c3c17159fd59fd7c57e4bbc54afbb858ac0a2c18fbe7c6ef05c72c2a1ae93" exitCode=0 Oct 01 06:29:40 crc kubenswrapper[4747]: I1001 06:29:40.923988 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l9vhd" event={"ID":"99c0681a-184c-4d71-95e7-213cfdd04960","Type":"ContainerDied","Data":"5d9c3c17159fd59fd7c57e4bbc54afbb858ac0a2c18fbe7c6ef05c72c2a1ae93"} Oct 01 06:29:40 crc kubenswrapper[4747]: I1001 06:29:40.924059 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l9vhd" event={"ID":"99c0681a-184c-4d71-95e7-213cfdd04960","Type":"ContainerStarted","Data":"5e25e300ad5e010e58a6093c5b6c2bd6cd2512f35a8343a11282de589c7a2d05"} Oct 01 06:29:42 crc kubenswrapper[4747]: I1001 06:29:42.957392 4747 generic.go:334] "Generic (PLEG): container finished" podID="99c0681a-184c-4d71-95e7-213cfdd04960" containerID="125f7de315a94dbef355fe0aa6f2ec2b338ccc7186b5a3d0b269e0a7577f2921" exitCode=0 Oct 01 06:29:42 crc kubenswrapper[4747]: I1001 06:29:42.957527 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l9vhd" event={"ID":"99c0681a-184c-4d71-95e7-213cfdd04960","Type":"ContainerDied","Data":"125f7de315a94dbef355fe0aa6f2ec2b338ccc7186b5a3d0b269e0a7577f2921"} Oct 01 06:29:43 crc kubenswrapper[4747]: I1001 06:29:43.969716 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l9vhd" event={"ID":"99c0681a-184c-4d71-95e7-213cfdd04960","Type":"ContainerStarted","Data":"cfd998c8280fea7f2940c48550b8c6046e976b260699e88a15d6f2d0eb38737c"} Oct 01 06:29:43 crc kubenswrapper[4747]: I1001 06:29:43.996520 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-l9vhd" podStartSLOduration=2.558683124 podStartE2EDuration="4.996500125s" podCreationTimestamp="2025-10-01 06:29:39 +0000 UTC" firstStartedPulling="2025-10-01 06:29:40.9262435 +0000 UTC m=+782.335900579" lastFinishedPulling="2025-10-01 06:29:43.364060491 +0000 UTC m=+784.773717580" observedRunningTime="2025-10-01 06:29:43.993211522 +0000 UTC m=+785.402868651" watchObservedRunningTime="2025-10-01 06:29:43.996500125 +0000 UTC m=+785.406157224" Oct 01 06:29:44 crc kubenswrapper[4747]: I1001 06:29:44.514303 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jpshd"] Oct 01 06:29:44 crc kubenswrapper[4747]: I1001 06:29:44.516904 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jpshd" Oct 01 06:29:44 crc kubenswrapper[4747]: I1001 06:29:44.531112 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jpshd"] Oct 01 06:29:44 crc kubenswrapper[4747]: I1001 06:29:44.638139 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgzb2\" (UniqueName: \"kubernetes.io/projected/df5e259a-b997-45d6-9ba2-94ded72e4de6-kube-api-access-wgzb2\") pod \"redhat-marketplace-jpshd\" (UID: \"df5e259a-b997-45d6-9ba2-94ded72e4de6\") " pod="openshift-marketplace/redhat-marketplace-jpshd" Oct 01 06:29:44 crc kubenswrapper[4747]: I1001 06:29:44.638211 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df5e259a-b997-45d6-9ba2-94ded72e4de6-utilities\") pod \"redhat-marketplace-jpshd\" (UID: \"df5e259a-b997-45d6-9ba2-94ded72e4de6\") " pod="openshift-marketplace/redhat-marketplace-jpshd" Oct 01 06:29:44 crc kubenswrapper[4747]: I1001 06:29:44.638621 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df5e259a-b997-45d6-9ba2-94ded72e4de6-catalog-content\") pod \"redhat-marketplace-jpshd\" (UID: \"df5e259a-b997-45d6-9ba2-94ded72e4de6\") " pod="openshift-marketplace/redhat-marketplace-jpshd" Oct 01 06:29:44 crc kubenswrapper[4747]: I1001 06:29:44.740409 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgzb2\" (UniqueName: \"kubernetes.io/projected/df5e259a-b997-45d6-9ba2-94ded72e4de6-kube-api-access-wgzb2\") pod \"redhat-marketplace-jpshd\" (UID: \"df5e259a-b997-45d6-9ba2-94ded72e4de6\") " pod="openshift-marketplace/redhat-marketplace-jpshd" Oct 01 06:29:44 crc kubenswrapper[4747]: I1001 06:29:44.740488 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df5e259a-b997-45d6-9ba2-94ded72e4de6-utilities\") pod \"redhat-marketplace-jpshd\" (UID: \"df5e259a-b997-45d6-9ba2-94ded72e4de6\") " pod="openshift-marketplace/redhat-marketplace-jpshd" Oct 01 06:29:44 crc kubenswrapper[4747]: I1001 06:29:44.740633 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df5e259a-b997-45d6-9ba2-94ded72e4de6-catalog-content\") pod \"redhat-marketplace-jpshd\" (UID: \"df5e259a-b997-45d6-9ba2-94ded72e4de6\") " pod="openshift-marketplace/redhat-marketplace-jpshd" Oct 01 06:29:44 crc kubenswrapper[4747]: I1001 06:29:44.741365 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df5e259a-b997-45d6-9ba2-94ded72e4de6-catalog-content\") pod \"redhat-marketplace-jpshd\" (UID: \"df5e259a-b997-45d6-9ba2-94ded72e4de6\") " pod="openshift-marketplace/redhat-marketplace-jpshd" Oct 01 06:29:44 crc kubenswrapper[4747]: I1001 06:29:44.741904 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df5e259a-b997-45d6-9ba2-94ded72e4de6-utilities\") pod \"redhat-marketplace-jpshd\" (UID: \"df5e259a-b997-45d6-9ba2-94ded72e4de6\") " pod="openshift-marketplace/redhat-marketplace-jpshd" Oct 01 06:29:44 crc kubenswrapper[4747]: I1001 06:29:44.759374 4747 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-wgzb2\" (UniqueName: \"kubernetes.io/projected/df5e259a-b997-45d6-9ba2-94ded72e4de6-kube-api-access-wgzb2\") pod \"redhat-marketplace-jpshd\" (UID: \"df5e259a-b997-45d6-9ba2-94ded72e4de6\") " pod="openshift-marketplace/redhat-marketplace-jpshd" Oct 01 06:29:44 crc kubenswrapper[4747]: I1001 06:29:44.840700 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jpshd" Oct 01 06:29:45 crc kubenswrapper[4747]: I1001 06:29:45.323042 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jpshd"] Oct 01 06:29:45 crc kubenswrapper[4747]: I1001 06:29:45.989637 4747 generic.go:334] "Generic (PLEG): container finished" podID="df5e259a-b997-45d6-9ba2-94ded72e4de6" containerID="d917b19475804562f29ab51a4bdd9abe1193fbfcd42ceeed0777d04c6dae4e08" exitCode=0 Oct 01 06:29:45 crc kubenswrapper[4747]: I1001 06:29:45.989710 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jpshd" event={"ID":"df5e259a-b997-45d6-9ba2-94ded72e4de6","Type":"ContainerDied","Data":"d917b19475804562f29ab51a4bdd9abe1193fbfcd42ceeed0777d04c6dae4e08"} Oct 01 06:29:45 crc kubenswrapper[4747]: I1001 06:29:45.990048 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jpshd" event={"ID":"df5e259a-b997-45d6-9ba2-94ded72e4de6","Type":"ContainerStarted","Data":"e37fcc5ba48fbd83c5770063b433c7cb143c6e0253aa54b7d8340965f93313d7"} Oct 01 06:29:48 crc kubenswrapper[4747]: I1001 06:29:48.007514 4747 generic.go:334] "Generic (PLEG): container finished" podID="df5e259a-b997-45d6-9ba2-94ded72e4de6" containerID="b079ce86dc07183f3ca842cfe2999c2f403e379272490a6e67666fbc1afafdf1" exitCode=0 Oct 01 06:29:48 crc kubenswrapper[4747]: I1001 06:29:48.007605 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jpshd" event={"ID":"df5e259a-b997-45d6-9ba2-94ded72e4de6","Type":"ContainerDied","Data":"b079ce86dc07183f3ca842cfe2999c2f403e379272490a6e67666fbc1afafdf1"} Oct 01 06:29:49 crc kubenswrapper[4747]: I1001 06:29:49.022401 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jpshd" event={"ID":"df5e259a-b997-45d6-9ba2-94ded72e4de6","Type":"ContainerStarted","Data":"6476a40875e836befd11e8b967573795603103af1d8c74aec0f85ba3f48da619"} Oct 01 06:29:49 crc kubenswrapper[4747]: I1001 06:29:49.828184 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-l9vhd" Oct 01 06:29:49 crc kubenswrapper[4747]: I1001 06:29:49.828243 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-l9vhd" Oct 01 06:29:49 crc kubenswrapper[4747]: I1001 06:29:49.894381 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-l9vhd" Oct 01 06:29:49 crc kubenswrapper[4747]: I1001 06:29:49.920182 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jpshd" podStartSLOduration=3.513193296 podStartE2EDuration="5.920151341s" podCreationTimestamp="2025-10-01 06:29:44 +0000 UTC" firstStartedPulling="2025-10-01 06:29:45.991524922 +0000 UTC m=+787.401181971" lastFinishedPulling="2025-10-01 06:29:48.398482937 +0000 UTC m=+789.808140016" observedRunningTime="2025-10-01 06:29:49.047678521 +0000 UTC 
m=+790.457335610" watchObservedRunningTime="2025-10-01 06:29:49.920151341 +0000 UTC m=+791.329808420" Oct 01 06:29:50 crc kubenswrapper[4747]: I1001 06:29:50.108403 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-l9vhd" Oct 01 06:29:52 crc kubenswrapper[4747]: I1001 06:29:52.304339 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l9vhd"] Oct 01 06:29:52 crc kubenswrapper[4747]: I1001 06:29:52.305113 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-l9vhd" podUID="99c0681a-184c-4d71-95e7-213cfdd04960" containerName="registry-server" containerID="cri-o://cfd998c8280fea7f2940c48550b8c6046e976b260699e88a15d6f2d0eb38737c" gracePeriod=2 Oct 01 06:29:52 crc kubenswrapper[4747]: I1001 06:29:52.807867 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l9vhd" Oct 01 06:29:52 crc kubenswrapper[4747]: I1001 06:29:52.967204 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99c0681a-184c-4d71-95e7-213cfdd04960-utilities\") pod \"99c0681a-184c-4d71-95e7-213cfdd04960\" (UID: \"99c0681a-184c-4d71-95e7-213cfdd04960\") " Oct 01 06:29:52 crc kubenswrapper[4747]: I1001 06:29:52.967496 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99c0681a-184c-4d71-95e7-213cfdd04960-catalog-content\") pod \"99c0681a-184c-4d71-95e7-213cfdd04960\" (UID: \"99c0681a-184c-4d71-95e7-213cfdd04960\") " Oct 01 06:29:52 crc kubenswrapper[4747]: I1001 06:29:52.967547 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrtg7\" (UniqueName: \"kubernetes.io/projected/99c0681a-184c-4d71-95e7-213cfdd04960-kube-api-access-hrtg7\") pod \"99c0681a-184c-4d71-95e7-213cfdd04960\" (UID: \"99c0681a-184c-4d71-95e7-213cfdd04960\") " Oct 01 06:29:52 crc kubenswrapper[4747]: I1001 06:29:52.967943 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99c0681a-184c-4d71-95e7-213cfdd04960-utilities" (OuterVolumeSpecName: "utilities") pod "99c0681a-184c-4d71-95e7-213cfdd04960" (UID: "99c0681a-184c-4d71-95e7-213cfdd04960"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:29:52 crc kubenswrapper[4747]: I1001 06:29:52.972986 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99c0681a-184c-4d71-95e7-213cfdd04960-kube-api-access-hrtg7" (OuterVolumeSpecName: "kube-api-access-hrtg7") pod "99c0681a-184c-4d71-95e7-213cfdd04960" (UID: "99c0681a-184c-4d71-95e7-213cfdd04960"). InnerVolumeSpecName "kube-api-access-hrtg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:29:53 crc kubenswrapper[4747]: I1001 06:29:53.063807 4747 generic.go:334] "Generic (PLEG): container finished" podID="99c0681a-184c-4d71-95e7-213cfdd04960" containerID="cfd998c8280fea7f2940c48550b8c6046e976b260699e88a15d6f2d0eb38737c" exitCode=0 Oct 01 06:29:53 crc kubenswrapper[4747]: I1001 06:29:53.063849 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l9vhd" event={"ID":"99c0681a-184c-4d71-95e7-213cfdd04960","Type":"ContainerDied","Data":"cfd998c8280fea7f2940c48550b8c6046e976b260699e88a15d6f2d0eb38737c"} Oct 01 06:29:53 crc kubenswrapper[4747]: I1001 06:29:53.063870 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l9vhd" Oct 01 06:29:53 crc kubenswrapper[4747]: I1001 06:29:53.063887 4747 scope.go:117] "RemoveContainer" containerID="cfd998c8280fea7f2940c48550b8c6046e976b260699e88a15d6f2d0eb38737c" Oct 01 06:29:53 crc kubenswrapper[4747]: I1001 06:29:53.063876 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l9vhd" event={"ID":"99c0681a-184c-4d71-95e7-213cfdd04960","Type":"ContainerDied","Data":"5e25e300ad5e010e58a6093c5b6c2bd6cd2512f35a8343a11282de589c7a2d05"} Oct 01 06:29:53 crc kubenswrapper[4747]: I1001 06:29:53.068679 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrtg7\" (UniqueName: \"kubernetes.io/projected/99c0681a-184c-4d71-95e7-213cfdd04960-kube-api-access-hrtg7\") on node \"crc\" DevicePath \"\"" Oct 01 06:29:53 crc kubenswrapper[4747]: I1001 06:29:53.068707 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99c0681a-184c-4d71-95e7-213cfdd04960-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:29:53 crc kubenswrapper[4747]: I1001 06:29:53.081293 4747 scope.go:117] "RemoveContainer" containerID="125f7de315a94dbef355fe0aa6f2ec2b338ccc7186b5a3d0b269e0a7577f2921" Oct 01 06:29:53 crc kubenswrapper[4747]: I1001 06:29:53.099958 4747 scope.go:117] "RemoveContainer" containerID="5d9c3c17159fd59fd7c57e4bbc54afbb858ac0a2c18fbe7c6ef05c72c2a1ae93" Oct 01 06:29:53 crc kubenswrapper[4747]: I1001 06:29:53.119763 4747 scope.go:117] "RemoveContainer" containerID="cfd998c8280fea7f2940c48550b8c6046e976b260699e88a15d6f2d0eb38737c" Oct 01 06:29:53 crc kubenswrapper[4747]: E1001 06:29:53.120122 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfd998c8280fea7f2940c48550b8c6046e976b260699e88a15d6f2d0eb38737c\": container with ID starting with cfd998c8280fea7f2940c48550b8c6046e976b260699e88a15d6f2d0eb38737c not found: ID does not exist" containerID="cfd998c8280fea7f2940c48550b8c6046e976b260699e88a15d6f2d0eb38737c" Oct 01 06:29:53 crc kubenswrapper[4747]: I1001 06:29:53.120164 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfd998c8280fea7f2940c48550b8c6046e976b260699e88a15d6f2d0eb38737c"} err="failed to get container status \"cfd998c8280fea7f2940c48550b8c6046e976b260699e88a15d6f2d0eb38737c\": rpc error: code = NotFound desc = could not find container \"cfd998c8280fea7f2940c48550b8c6046e976b260699e88a15d6f2d0eb38737c\": container with ID starting with cfd998c8280fea7f2940c48550b8c6046e976b260699e88a15d6f2d0eb38737c not found: ID does not exist" Oct 01 06:29:53 crc kubenswrapper[4747]: I1001 06:29:53.120189 4747 scope.go:117] 
"RemoveContainer" containerID="125f7de315a94dbef355fe0aa6f2ec2b338ccc7186b5a3d0b269e0a7577f2921" Oct 01 06:29:53 crc kubenswrapper[4747]: E1001 06:29:53.120509 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"125f7de315a94dbef355fe0aa6f2ec2b338ccc7186b5a3d0b269e0a7577f2921\": container with ID starting with 125f7de315a94dbef355fe0aa6f2ec2b338ccc7186b5a3d0b269e0a7577f2921 not found: ID does not exist" containerID="125f7de315a94dbef355fe0aa6f2ec2b338ccc7186b5a3d0b269e0a7577f2921" Oct 01 06:29:53 crc kubenswrapper[4747]: I1001 06:29:53.120529 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"125f7de315a94dbef355fe0aa6f2ec2b338ccc7186b5a3d0b269e0a7577f2921"} err="failed to get container status \"125f7de315a94dbef355fe0aa6f2ec2b338ccc7186b5a3d0b269e0a7577f2921\": rpc error: code = NotFound desc = could not find container \"125f7de315a94dbef355fe0aa6f2ec2b338ccc7186b5a3d0b269e0a7577f2921\": container with ID starting with 125f7de315a94dbef355fe0aa6f2ec2b338ccc7186b5a3d0b269e0a7577f2921 not found: ID does not exist" Oct 01 06:29:53 crc kubenswrapper[4747]: I1001 06:29:53.120542 4747 scope.go:117] "RemoveContainer" containerID="5d9c3c17159fd59fd7c57e4bbc54afbb858ac0a2c18fbe7c6ef05c72c2a1ae93" Oct 01 06:29:53 crc kubenswrapper[4747]: E1001 06:29:53.120765 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d9c3c17159fd59fd7c57e4bbc54afbb858ac0a2c18fbe7c6ef05c72c2a1ae93\": container with ID starting with 5d9c3c17159fd59fd7c57e4bbc54afbb858ac0a2c18fbe7c6ef05c72c2a1ae93 not found: ID does not exist" containerID="5d9c3c17159fd59fd7c57e4bbc54afbb858ac0a2c18fbe7c6ef05c72c2a1ae93" Oct 01 06:29:53 crc kubenswrapper[4747]: I1001 06:29:53.120799 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d9c3c17159fd59fd7c57e4bbc54afbb858ac0a2c18fbe7c6ef05c72c2a1ae93"} err="failed to get container status \"5d9c3c17159fd59fd7c57e4bbc54afbb858ac0a2c18fbe7c6ef05c72c2a1ae93\": rpc error: code = NotFound desc = could not find container \"5d9c3c17159fd59fd7c57e4bbc54afbb858ac0a2c18fbe7c6ef05c72c2a1ae93\": container with ID starting with 5d9c3c17159fd59fd7c57e4bbc54afbb858ac0a2c18fbe7c6ef05c72c2a1ae93 not found: ID does not exist" Oct 01 06:29:53 crc kubenswrapper[4747]: I1001 06:29:53.793869 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99c0681a-184c-4d71-95e7-213cfdd04960-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "99c0681a-184c-4d71-95e7-213cfdd04960" (UID: "99c0681a-184c-4d71-95e7-213cfdd04960"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:29:53 crc kubenswrapper[4747]: I1001 06:29:53.880599 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99c0681a-184c-4d71-95e7-213cfdd04960-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.008625 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l9vhd"] Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.014713 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-l9vhd"] Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.167380 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk"] Oct 01 06:29:54 crc kubenswrapper[4747]: E1001 06:29:54.167887 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99c0681a-184c-4d71-95e7-213cfdd04960" containerName="registry-server" Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.167925 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="99c0681a-184c-4d71-95e7-213cfdd04960" containerName="registry-server" Oct 01 06:29:54 crc kubenswrapper[4747]: E1001 06:29:54.167953 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99c0681a-184c-4d71-95e7-213cfdd04960" containerName="extract-content" Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.167969 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="99c0681a-184c-4d71-95e7-213cfdd04960" containerName="extract-content" Oct 01 06:29:54 crc kubenswrapper[4747]: E1001 06:29:54.167997 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99c0681a-184c-4d71-95e7-213cfdd04960" containerName="extract-utilities" Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.168013 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="99c0681a-184c-4d71-95e7-213cfdd04960" containerName="extract-utilities" Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.168219 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="99c0681a-184c-4d71-95e7-213cfdd04960" containerName="registry-server" Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.169961 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk" Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.174089 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-b9vtl" Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.184437 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk"] Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.285845 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1023659f-3dcf-4a2c-8f6a-eeda4c6a0828-bundle\") pod \"e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk\" (UID: \"1023659f-3dcf-4a2c-8f6a-eeda4c6a0828\") " pod="openstack-operators/e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk" Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.285900 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwhs7\" (UniqueName: \"kubernetes.io/projected/1023659f-3dcf-4a2c-8f6a-eeda4c6a0828-kube-api-access-dwhs7\") pod \"e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk\" (UID: \"1023659f-3dcf-4a2c-8f6a-eeda4c6a0828\") " pod="openstack-operators/e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk" Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.285934 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1023659f-3dcf-4a2c-8f6a-eeda4c6a0828-util\") pod \"e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk\" (UID: \"1023659f-3dcf-4a2c-8f6a-eeda4c6a0828\") " pod="openstack-operators/e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk" Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.387079 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1023659f-3dcf-4a2c-8f6a-eeda4c6a0828-bundle\") pod \"e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk\" (UID: \"1023659f-3dcf-4a2c-8f6a-eeda4c6a0828\") " pod="openstack-operators/e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk" Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.387161 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwhs7\" (UniqueName: \"kubernetes.io/projected/1023659f-3dcf-4a2c-8f6a-eeda4c6a0828-kube-api-access-dwhs7\") pod \"e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk\" (UID: \"1023659f-3dcf-4a2c-8f6a-eeda4c6a0828\") " pod="openstack-operators/e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk" Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.387256 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1023659f-3dcf-4a2c-8f6a-eeda4c6a0828-util\") pod \"e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk\" (UID: \"1023659f-3dcf-4a2c-8f6a-eeda4c6a0828\") " pod="openstack-operators/e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk" Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.387602 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/1023659f-3dcf-4a2c-8f6a-eeda4c6a0828-bundle\") pod \"e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk\" (UID: \"1023659f-3dcf-4a2c-8f6a-eeda4c6a0828\") " pod="openstack-operators/e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk" Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.388015 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1023659f-3dcf-4a2c-8f6a-eeda4c6a0828-util\") pod \"e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk\" (UID: \"1023659f-3dcf-4a2c-8f6a-eeda4c6a0828\") " pod="openstack-operators/e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk" Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.420355 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwhs7\" (UniqueName: \"kubernetes.io/projected/1023659f-3dcf-4a2c-8f6a-eeda4c6a0828-kube-api-access-dwhs7\") pod \"e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk\" (UID: \"1023659f-3dcf-4a2c-8f6a-eeda4c6a0828\") " pod="openstack-operators/e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk" Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.486174 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk" Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.841166 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jpshd" Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.841502 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jpshd" Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.915387 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jpshd" Oct 01 06:29:54 crc kubenswrapper[4747]: I1001 06:29:54.976663 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk"] Oct 01 06:29:55 crc kubenswrapper[4747]: I1001 06:29:55.083500 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk" event={"ID":"1023659f-3dcf-4a2c-8f6a-eeda4c6a0828","Type":"ContainerStarted","Data":"1b019bf0b0b5bf26f6ca26a614a5bc77986fc4b0acc6694bfee332daedf714f0"} Oct 01 06:29:55 crc kubenswrapper[4747]: I1001 06:29:55.140925 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jpshd" Oct 01 06:29:55 crc kubenswrapper[4747]: I1001 06:29:55.290037 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99c0681a-184c-4d71-95e7-213cfdd04960" path="/var/lib/kubelet/pods/99c0681a-184c-4d71-95e7-213cfdd04960/volumes" Oct 01 06:29:56 crc kubenswrapper[4747]: I1001 06:29:56.092802 4747 generic.go:334] "Generic (PLEG): container finished" podID="1023659f-3dcf-4a2c-8f6a-eeda4c6a0828" containerID="62704c83f9a5c78658a5a9fae77c9a82c822f3de991cf0a03cf03c0aa3d717f5" exitCode=0 Oct 01 06:29:56 crc kubenswrapper[4747]: I1001 06:29:56.092862 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk" 
event={"ID":"1023659f-3dcf-4a2c-8f6a-eeda4c6a0828","Type":"ContainerDied","Data":"62704c83f9a5c78658a5a9fae77c9a82c822f3de991cf0a03cf03c0aa3d717f5"} Oct 01 06:29:58 crc kubenswrapper[4747]: I1001 06:29:58.110552 4747 generic.go:334] "Generic (PLEG): container finished" podID="1023659f-3dcf-4a2c-8f6a-eeda4c6a0828" containerID="76418aa836ee4ad29fab7e048240c0bc338de8dc11c0a5f51fc012270a48f2d4" exitCode=0 Oct 01 06:29:58 crc kubenswrapper[4747]: I1001 06:29:58.110683 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk" event={"ID":"1023659f-3dcf-4a2c-8f6a-eeda4c6a0828","Type":"ContainerDied","Data":"76418aa836ee4ad29fab7e048240c0bc338de8dc11c0a5f51fc012270a48f2d4"} Oct 01 06:29:59 crc kubenswrapper[4747]: I1001 06:29:59.121352 4747 generic.go:334] "Generic (PLEG): container finished" podID="1023659f-3dcf-4a2c-8f6a-eeda4c6a0828" containerID="f632957beb032c6b5d5f4360c36b1c91826fa5115b85edf2e583cc7083b352cc" exitCode=0 Oct 01 06:29:59 crc kubenswrapper[4747]: I1001 06:29:59.121456 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk" event={"ID":"1023659f-3dcf-4a2c-8f6a-eeda4c6a0828","Type":"ContainerDied","Data":"f632957beb032c6b5d5f4360c36b1c91826fa5115b85edf2e583cc7083b352cc"} Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.138701 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321670-p7w8k"] Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.139403 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-p7w8k" Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.141106 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.141295 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.149128 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321670-p7w8k"] Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.269650 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf9b4\" (UniqueName: \"kubernetes.io/projected/10ee4413-e7a4-4891-b7a9-a0506b18461f-kube-api-access-zf9b4\") pod \"collect-profiles-29321670-p7w8k\" (UID: \"10ee4413-e7a4-4891-b7a9-a0506b18461f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-p7w8k" Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.269977 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/10ee4413-e7a4-4891-b7a9-a0506b18461f-secret-volume\") pod \"collect-profiles-29321670-p7w8k\" (UID: \"10ee4413-e7a4-4891-b7a9-a0506b18461f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-p7w8k" Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.270026 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/10ee4413-e7a4-4891-b7a9-a0506b18461f-config-volume\") pod 
\"collect-profiles-29321670-p7w8k\" (UID: \"10ee4413-e7a4-4891-b7a9-a0506b18461f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-p7w8k" Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.371175 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf9b4\" (UniqueName: \"kubernetes.io/projected/10ee4413-e7a4-4891-b7a9-a0506b18461f-kube-api-access-zf9b4\") pod \"collect-profiles-29321670-p7w8k\" (UID: \"10ee4413-e7a4-4891-b7a9-a0506b18461f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-p7w8k" Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.371264 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/10ee4413-e7a4-4891-b7a9-a0506b18461f-secret-volume\") pod \"collect-profiles-29321670-p7w8k\" (UID: \"10ee4413-e7a4-4891-b7a9-a0506b18461f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-p7w8k" Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.371304 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/10ee4413-e7a4-4891-b7a9-a0506b18461f-config-volume\") pod \"collect-profiles-29321670-p7w8k\" (UID: \"10ee4413-e7a4-4891-b7a9-a0506b18461f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-p7w8k" Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.372847 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/10ee4413-e7a4-4891-b7a9-a0506b18461f-config-volume\") pod \"collect-profiles-29321670-p7w8k\" (UID: \"10ee4413-e7a4-4891-b7a9-a0506b18461f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-p7w8k" Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.377668 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk" Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.378037 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/10ee4413-e7a4-4891-b7a9-a0506b18461f-secret-volume\") pod \"collect-profiles-29321670-p7w8k\" (UID: \"10ee4413-e7a4-4891-b7a9-a0506b18461f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-p7w8k" Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.391291 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf9b4\" (UniqueName: \"kubernetes.io/projected/10ee4413-e7a4-4891-b7a9-a0506b18461f-kube-api-access-zf9b4\") pod \"collect-profiles-29321670-p7w8k\" (UID: \"10ee4413-e7a4-4891-b7a9-a0506b18461f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-p7w8k" Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.452603 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-p7w8k" Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.472184 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwhs7\" (UniqueName: \"kubernetes.io/projected/1023659f-3dcf-4a2c-8f6a-eeda4c6a0828-kube-api-access-dwhs7\") pod \"1023659f-3dcf-4a2c-8f6a-eeda4c6a0828\" (UID: \"1023659f-3dcf-4a2c-8f6a-eeda4c6a0828\") " Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.472269 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1023659f-3dcf-4a2c-8f6a-eeda4c6a0828-bundle\") pod \"1023659f-3dcf-4a2c-8f6a-eeda4c6a0828\" (UID: \"1023659f-3dcf-4a2c-8f6a-eeda4c6a0828\") " Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.472413 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1023659f-3dcf-4a2c-8f6a-eeda4c6a0828-util\") pod \"1023659f-3dcf-4a2c-8f6a-eeda4c6a0828\" (UID: \"1023659f-3dcf-4a2c-8f6a-eeda4c6a0828\") " Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.474137 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1023659f-3dcf-4a2c-8f6a-eeda4c6a0828-bundle" (OuterVolumeSpecName: "bundle") pod "1023659f-3dcf-4a2c-8f6a-eeda4c6a0828" (UID: "1023659f-3dcf-4a2c-8f6a-eeda4c6a0828"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.475848 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1023659f-3dcf-4a2c-8f6a-eeda4c6a0828-kube-api-access-dwhs7" (OuterVolumeSpecName: "kube-api-access-dwhs7") pod "1023659f-3dcf-4a2c-8f6a-eeda4c6a0828" (UID: "1023659f-3dcf-4a2c-8f6a-eeda4c6a0828"). InnerVolumeSpecName "kube-api-access-dwhs7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.493809 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1023659f-3dcf-4a2c-8f6a-eeda4c6a0828-util" (OuterVolumeSpecName: "util") pod "1023659f-3dcf-4a2c-8f6a-eeda4c6a0828" (UID: "1023659f-3dcf-4a2c-8f6a-eeda4c6a0828"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.575149 4747 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1023659f-3dcf-4a2c-8f6a-eeda4c6a0828-util\") on node \"crc\" DevicePath \"\"" Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.575586 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwhs7\" (UniqueName: \"kubernetes.io/projected/1023659f-3dcf-4a2c-8f6a-eeda4c6a0828-kube-api-access-dwhs7\") on node \"crc\" DevicePath \"\"" Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.575617 4747 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1023659f-3dcf-4a2c-8f6a-eeda4c6a0828-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:30:00 crc kubenswrapper[4747]: I1001 06:30:00.901625 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321670-p7w8k"] Oct 01 06:30:00 crc kubenswrapper[4747]: W1001 06:30:00.911465 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod10ee4413_e7a4_4891_b7a9_a0506b18461f.slice/crio-e20016dd458c45ca20dd52eee7a5ab7243d6c694f60a1d4febc3f3a30f85cf0c WatchSource:0}: Error finding container e20016dd458c45ca20dd52eee7a5ab7243d6c694f60a1d4febc3f3a30f85cf0c: Status 404 returned error can't find the container with id e20016dd458c45ca20dd52eee7a5ab7243d6c694f60a1d4febc3f3a30f85cf0c Oct 01 06:30:01 crc kubenswrapper[4747]: I1001 06:30:01.101080 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jpshd"] Oct 01 06:30:01 crc kubenswrapper[4747]: I1001 06:30:01.101488 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jpshd" podUID="df5e259a-b997-45d6-9ba2-94ded72e4de6" containerName="registry-server" containerID="cri-o://6476a40875e836befd11e8b967573795603103af1d8c74aec0f85ba3f48da619" gracePeriod=2 Oct 01 06:30:01 crc kubenswrapper[4747]: I1001 06:30:01.136497 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-p7w8k" event={"ID":"10ee4413-e7a4-4891-b7a9-a0506b18461f","Type":"ContainerStarted","Data":"3531566d20310d0753ae8a188351c7eff13f94c0eed8db554ad578c8900e4fc9"} Oct 01 06:30:01 crc kubenswrapper[4747]: I1001 06:30:01.136543 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-p7w8k" event={"ID":"10ee4413-e7a4-4891-b7a9-a0506b18461f","Type":"ContainerStarted","Data":"e20016dd458c45ca20dd52eee7a5ab7243d6c694f60a1d4febc3f3a30f85cf0c"} Oct 01 06:30:01 crc kubenswrapper[4747]: I1001 06:30:01.140312 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk" event={"ID":"1023659f-3dcf-4a2c-8f6a-eeda4c6a0828","Type":"ContainerDied","Data":"1b019bf0b0b5bf26f6ca26a614a5bc77986fc4b0acc6694bfee332daedf714f0"} Oct 01 06:30:01 crc kubenswrapper[4747]: I1001 06:30:01.140345 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b019bf0b0b5bf26f6ca26a614a5bc77986fc4b0acc6694bfee332daedf714f0" Oct 01 06:30:01 crc kubenswrapper[4747]: I1001 06:30:01.140383 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk" Oct 01 06:30:01 crc kubenswrapper[4747]: I1001 06:30:01.159375 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-p7w8k" podStartSLOduration=1.159347222 podStartE2EDuration="1.159347222s" podCreationTimestamp="2025-10-01 06:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:30:01.154498059 +0000 UTC m=+802.564155188" watchObservedRunningTime="2025-10-01 06:30:01.159347222 +0000 UTC m=+802.569004301" Oct 01 06:30:01 crc kubenswrapper[4747]: I1001 06:30:01.513121 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jpshd" Oct 01 06:30:01 crc kubenswrapper[4747]: I1001 06:30:01.590202 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df5e259a-b997-45d6-9ba2-94ded72e4de6-utilities\") pod \"df5e259a-b997-45d6-9ba2-94ded72e4de6\" (UID: \"df5e259a-b997-45d6-9ba2-94ded72e4de6\") " Oct 01 06:30:01 crc kubenswrapper[4747]: I1001 06:30:01.590275 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgzb2\" (UniqueName: \"kubernetes.io/projected/df5e259a-b997-45d6-9ba2-94ded72e4de6-kube-api-access-wgzb2\") pod \"df5e259a-b997-45d6-9ba2-94ded72e4de6\" (UID: \"df5e259a-b997-45d6-9ba2-94ded72e4de6\") " Oct 01 06:30:01 crc kubenswrapper[4747]: I1001 06:30:01.590339 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df5e259a-b997-45d6-9ba2-94ded72e4de6-catalog-content\") pod \"df5e259a-b997-45d6-9ba2-94ded72e4de6\" (UID: \"df5e259a-b997-45d6-9ba2-94ded72e4de6\") " Oct 01 06:30:01 crc kubenswrapper[4747]: I1001 06:30:01.591346 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df5e259a-b997-45d6-9ba2-94ded72e4de6-utilities" (OuterVolumeSpecName: "utilities") pod "df5e259a-b997-45d6-9ba2-94ded72e4de6" (UID: "df5e259a-b997-45d6-9ba2-94ded72e4de6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:30:01 crc kubenswrapper[4747]: I1001 06:30:01.598283 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df5e259a-b997-45d6-9ba2-94ded72e4de6-kube-api-access-wgzb2" (OuterVolumeSpecName: "kube-api-access-wgzb2") pod "df5e259a-b997-45d6-9ba2-94ded72e4de6" (UID: "df5e259a-b997-45d6-9ba2-94ded72e4de6"). InnerVolumeSpecName "kube-api-access-wgzb2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:30:01 crc kubenswrapper[4747]: I1001 06:30:01.608363 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df5e259a-b997-45d6-9ba2-94ded72e4de6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "df5e259a-b997-45d6-9ba2-94ded72e4de6" (UID: "df5e259a-b997-45d6-9ba2-94ded72e4de6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:30:01 crc kubenswrapper[4747]: I1001 06:30:01.692140 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df5e259a-b997-45d6-9ba2-94ded72e4de6-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:30:01 crc kubenswrapper[4747]: I1001 06:30:01.692214 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgzb2\" (UniqueName: \"kubernetes.io/projected/df5e259a-b997-45d6-9ba2-94ded72e4de6-kube-api-access-wgzb2\") on node \"crc\" DevicePath \"\"" Oct 01 06:30:01 crc kubenswrapper[4747]: I1001 06:30:01.692241 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df5e259a-b997-45d6-9ba2-94ded72e4de6-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:30:02 crc kubenswrapper[4747]: I1001 06:30:02.149038 4747 generic.go:334] "Generic (PLEG): container finished" podID="10ee4413-e7a4-4891-b7a9-a0506b18461f" containerID="3531566d20310d0753ae8a188351c7eff13f94c0eed8db554ad578c8900e4fc9" exitCode=0 Oct 01 06:30:02 crc kubenswrapper[4747]: I1001 06:30:02.149147 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-p7w8k" event={"ID":"10ee4413-e7a4-4891-b7a9-a0506b18461f","Type":"ContainerDied","Data":"3531566d20310d0753ae8a188351c7eff13f94c0eed8db554ad578c8900e4fc9"} Oct 01 06:30:02 crc kubenswrapper[4747]: I1001 06:30:02.152934 4747 generic.go:334] "Generic (PLEG): container finished" podID="df5e259a-b997-45d6-9ba2-94ded72e4de6" containerID="6476a40875e836befd11e8b967573795603103af1d8c74aec0f85ba3f48da619" exitCode=0 Oct 01 06:30:02 crc kubenswrapper[4747]: I1001 06:30:02.152987 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jpshd" event={"ID":"df5e259a-b997-45d6-9ba2-94ded72e4de6","Type":"ContainerDied","Data":"6476a40875e836befd11e8b967573795603103af1d8c74aec0f85ba3f48da619"} Oct 01 06:30:02 crc kubenswrapper[4747]: I1001 06:30:02.153004 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jpshd" Oct 01 06:30:02 crc kubenswrapper[4747]: I1001 06:30:02.153016 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jpshd" event={"ID":"df5e259a-b997-45d6-9ba2-94ded72e4de6","Type":"ContainerDied","Data":"e37fcc5ba48fbd83c5770063b433c7cb143c6e0253aa54b7d8340965f93313d7"} Oct 01 06:30:02 crc kubenswrapper[4747]: I1001 06:30:02.153068 4747 scope.go:117] "RemoveContainer" containerID="6476a40875e836befd11e8b967573795603103af1d8c74aec0f85ba3f48da619" Oct 01 06:30:02 crc kubenswrapper[4747]: I1001 06:30:02.190591 4747 scope.go:117] "RemoveContainer" containerID="b079ce86dc07183f3ca842cfe2999c2f403e379272490a6e67666fbc1afafdf1" Oct 01 06:30:02 crc kubenswrapper[4747]: I1001 06:30:02.206834 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jpshd"] Oct 01 06:30:02 crc kubenswrapper[4747]: I1001 06:30:02.211349 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jpshd"] Oct 01 06:30:02 crc kubenswrapper[4747]: I1001 06:30:02.224155 4747 scope.go:117] "RemoveContainer" containerID="d917b19475804562f29ab51a4bdd9abe1193fbfcd42ceeed0777d04c6dae4e08" Oct 01 06:30:02 crc kubenswrapper[4747]: I1001 06:30:02.245868 4747 scope.go:117] "RemoveContainer" containerID="6476a40875e836befd11e8b967573795603103af1d8c74aec0f85ba3f48da619" Oct 01 06:30:02 crc kubenswrapper[4747]: E1001 06:30:02.246145 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6476a40875e836befd11e8b967573795603103af1d8c74aec0f85ba3f48da619\": container with ID starting with 6476a40875e836befd11e8b967573795603103af1d8c74aec0f85ba3f48da619 not found: ID does not exist" containerID="6476a40875e836befd11e8b967573795603103af1d8c74aec0f85ba3f48da619" Oct 01 06:30:02 crc kubenswrapper[4747]: I1001 06:30:02.246172 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6476a40875e836befd11e8b967573795603103af1d8c74aec0f85ba3f48da619"} err="failed to get container status \"6476a40875e836befd11e8b967573795603103af1d8c74aec0f85ba3f48da619\": rpc error: code = NotFound desc = could not find container \"6476a40875e836befd11e8b967573795603103af1d8c74aec0f85ba3f48da619\": container with ID starting with 6476a40875e836befd11e8b967573795603103af1d8c74aec0f85ba3f48da619 not found: ID does not exist" Oct 01 06:30:02 crc kubenswrapper[4747]: I1001 06:30:02.246190 4747 scope.go:117] "RemoveContainer" containerID="b079ce86dc07183f3ca842cfe2999c2f403e379272490a6e67666fbc1afafdf1" Oct 01 06:30:02 crc kubenswrapper[4747]: E1001 06:30:02.246373 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b079ce86dc07183f3ca842cfe2999c2f403e379272490a6e67666fbc1afafdf1\": container with ID starting with b079ce86dc07183f3ca842cfe2999c2f403e379272490a6e67666fbc1afafdf1 not found: ID does not exist" containerID="b079ce86dc07183f3ca842cfe2999c2f403e379272490a6e67666fbc1afafdf1" Oct 01 06:30:02 crc kubenswrapper[4747]: I1001 06:30:02.246392 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b079ce86dc07183f3ca842cfe2999c2f403e379272490a6e67666fbc1afafdf1"} err="failed to get container status \"b079ce86dc07183f3ca842cfe2999c2f403e379272490a6e67666fbc1afafdf1\": rpc error: code = NotFound desc = could not find 
container \"b079ce86dc07183f3ca842cfe2999c2f403e379272490a6e67666fbc1afafdf1\": container with ID starting with b079ce86dc07183f3ca842cfe2999c2f403e379272490a6e67666fbc1afafdf1 not found: ID does not exist" Oct 01 06:30:02 crc kubenswrapper[4747]: I1001 06:30:02.246404 4747 scope.go:117] "RemoveContainer" containerID="d917b19475804562f29ab51a4bdd9abe1193fbfcd42ceeed0777d04c6dae4e08" Oct 01 06:30:02 crc kubenswrapper[4747]: E1001 06:30:02.246613 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d917b19475804562f29ab51a4bdd9abe1193fbfcd42ceeed0777d04c6dae4e08\": container with ID starting with d917b19475804562f29ab51a4bdd9abe1193fbfcd42ceeed0777d04c6dae4e08 not found: ID does not exist" containerID="d917b19475804562f29ab51a4bdd9abe1193fbfcd42ceeed0777d04c6dae4e08" Oct 01 06:30:02 crc kubenswrapper[4747]: I1001 06:30:02.246632 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d917b19475804562f29ab51a4bdd9abe1193fbfcd42ceeed0777d04c6dae4e08"} err="failed to get container status \"d917b19475804562f29ab51a4bdd9abe1193fbfcd42ceeed0777d04c6dae4e08\": rpc error: code = NotFound desc = could not find container \"d917b19475804562f29ab51a4bdd9abe1193fbfcd42ceeed0777d04c6dae4e08\": container with ID starting with d917b19475804562f29ab51a4bdd9abe1193fbfcd42ceeed0777d04c6dae4e08 not found: ID does not exist" Oct 01 06:30:03 crc kubenswrapper[4747]: I1001 06:30:03.292050 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df5e259a-b997-45d6-9ba2-94ded72e4de6" path="/var/lib/kubelet/pods/df5e259a-b997-45d6-9ba2-94ded72e4de6/volumes" Oct 01 06:30:03 crc kubenswrapper[4747]: I1001 06:30:03.448012 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-p7w8k" Oct 01 06:30:03 crc kubenswrapper[4747]: I1001 06:30:03.613572 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/10ee4413-e7a4-4891-b7a9-a0506b18461f-secret-volume\") pod \"10ee4413-e7a4-4891-b7a9-a0506b18461f\" (UID: \"10ee4413-e7a4-4891-b7a9-a0506b18461f\") " Oct 01 06:30:03 crc kubenswrapper[4747]: I1001 06:30:03.613818 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zf9b4\" (UniqueName: \"kubernetes.io/projected/10ee4413-e7a4-4891-b7a9-a0506b18461f-kube-api-access-zf9b4\") pod \"10ee4413-e7a4-4891-b7a9-a0506b18461f\" (UID: \"10ee4413-e7a4-4891-b7a9-a0506b18461f\") " Oct 01 06:30:03 crc kubenswrapper[4747]: I1001 06:30:03.613871 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/10ee4413-e7a4-4891-b7a9-a0506b18461f-config-volume\") pod \"10ee4413-e7a4-4891-b7a9-a0506b18461f\" (UID: \"10ee4413-e7a4-4891-b7a9-a0506b18461f\") " Oct 01 06:30:03 crc kubenswrapper[4747]: I1001 06:30:03.614597 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10ee4413-e7a4-4891-b7a9-a0506b18461f-config-volume" (OuterVolumeSpecName: "config-volume") pod "10ee4413-e7a4-4891-b7a9-a0506b18461f" (UID: "10ee4413-e7a4-4891-b7a9-a0506b18461f"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:30:03 crc kubenswrapper[4747]: I1001 06:30:03.619837 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10ee4413-e7a4-4891-b7a9-a0506b18461f-kube-api-access-zf9b4" (OuterVolumeSpecName: "kube-api-access-zf9b4") pod "10ee4413-e7a4-4891-b7a9-a0506b18461f" (UID: "10ee4413-e7a4-4891-b7a9-a0506b18461f"). InnerVolumeSpecName "kube-api-access-zf9b4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:30:03 crc kubenswrapper[4747]: I1001 06:30:03.620077 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10ee4413-e7a4-4891-b7a9-a0506b18461f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "10ee4413-e7a4-4891-b7a9-a0506b18461f" (UID: "10ee4413-e7a4-4891-b7a9-a0506b18461f"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:30:03 crc kubenswrapper[4747]: I1001 06:30:03.715533 4747 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/10ee4413-e7a4-4891-b7a9-a0506b18461f-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 06:30:03 crc kubenswrapper[4747]: I1001 06:30:03.715582 4747 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/10ee4413-e7a4-4891-b7a9-a0506b18461f-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 06:30:03 crc kubenswrapper[4747]: I1001 06:30:03.715602 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zf9b4\" (UniqueName: \"kubernetes.io/projected/10ee4413-e7a4-4891-b7a9-a0506b18461f-kube-api-access-zf9b4\") on node \"crc\" DevicePath \"\"" Oct 01 06:30:04 crc kubenswrapper[4747]: I1001 06:30:04.172004 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-p7w8k" event={"ID":"10ee4413-e7a4-4891-b7a9-a0506b18461f","Type":"ContainerDied","Data":"e20016dd458c45ca20dd52eee7a5ab7243d6c694f60a1d4febc3f3a30f85cf0c"} Oct 01 06:30:04 crc kubenswrapper[4747]: I1001 06:30:04.172074 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e20016dd458c45ca20dd52eee7a5ab7243d6c694f60a1d4febc3f3a30f85cf0c" Oct 01 06:30:04 crc kubenswrapper[4747]: I1001 06:30:04.172099 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321670-p7w8k" Oct 01 06:30:05 crc kubenswrapper[4747]: I1001 06:30:05.761379 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:30:05 crc kubenswrapper[4747]: I1001 06:30:05.761461 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.604390 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-68d7898b58-wqz8p"] Oct 01 06:30:06 crc kubenswrapper[4747]: E1001 06:30:06.605031 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1023659f-3dcf-4a2c-8f6a-eeda4c6a0828" containerName="extract" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.605056 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="1023659f-3dcf-4a2c-8f6a-eeda4c6a0828" containerName="extract" Oct 01 06:30:06 crc kubenswrapper[4747]: E1001 06:30:06.605078 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df5e259a-b997-45d6-9ba2-94ded72e4de6" containerName="extract-utilities" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.605092 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="df5e259a-b997-45d6-9ba2-94ded72e4de6" containerName="extract-utilities" Oct 01 06:30:06 crc kubenswrapper[4747]: E1001 06:30:06.605114 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1023659f-3dcf-4a2c-8f6a-eeda4c6a0828" containerName="util" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.605128 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="1023659f-3dcf-4a2c-8f6a-eeda4c6a0828" containerName="util" Oct 01 06:30:06 crc kubenswrapper[4747]: E1001 06:30:06.605149 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df5e259a-b997-45d6-9ba2-94ded72e4de6" containerName="registry-server" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.605161 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="df5e259a-b997-45d6-9ba2-94ded72e4de6" containerName="registry-server" Oct 01 06:30:06 crc kubenswrapper[4747]: E1001 06:30:06.605178 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df5e259a-b997-45d6-9ba2-94ded72e4de6" containerName="extract-content" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.605191 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="df5e259a-b997-45d6-9ba2-94ded72e4de6" containerName="extract-content" Oct 01 06:30:06 crc kubenswrapper[4747]: E1001 06:30:06.605211 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1023659f-3dcf-4a2c-8f6a-eeda4c6a0828" containerName="pull" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.605223 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="1023659f-3dcf-4a2c-8f6a-eeda4c6a0828" containerName="pull" Oct 01 06:30:06 crc kubenswrapper[4747]: E1001 06:30:06.605240 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10ee4413-e7a4-4891-b7a9-a0506b18461f" 
containerName="collect-profiles" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.605251 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="10ee4413-e7a4-4891-b7a9-a0506b18461f" containerName="collect-profiles" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.605477 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="df5e259a-b997-45d6-9ba2-94ded72e4de6" containerName="registry-server" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.605518 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="10ee4413-e7a4-4891-b7a9-a0506b18461f" containerName="collect-profiles" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.605534 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="1023659f-3dcf-4a2c-8f6a-eeda4c6a0828" containerName="extract" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.608140 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-68d7898b58-wqz8p" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.610183 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-sz4pg" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.613319 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-service-cert" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.615622 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-68d7898b58-wqz8p"] Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.761264 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c21a5c1e-b158-4adf-bc18-818df3862825-apiservice-cert\") pod \"infra-operator-controller-manager-68d7898b58-wqz8p\" (UID: \"c21a5c1e-b158-4adf-bc18-818df3862825\") " pod="openstack-operators/infra-operator-controller-manager-68d7898b58-wqz8p" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.761325 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c21a5c1e-b158-4adf-bc18-818df3862825-webhook-cert\") pod \"infra-operator-controller-manager-68d7898b58-wqz8p\" (UID: \"c21a5c1e-b158-4adf-bc18-818df3862825\") " pod="openstack-operators/infra-operator-controller-manager-68d7898b58-wqz8p" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.761389 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftll4\" (UniqueName: \"kubernetes.io/projected/c21a5c1e-b158-4adf-bc18-818df3862825-kube-api-access-ftll4\") pod \"infra-operator-controller-manager-68d7898b58-wqz8p\" (UID: \"c21a5c1e-b158-4adf-bc18-818df3862825\") " pod="openstack-operators/infra-operator-controller-manager-68d7898b58-wqz8p" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.862523 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftll4\" (UniqueName: \"kubernetes.io/projected/c21a5c1e-b158-4adf-bc18-818df3862825-kube-api-access-ftll4\") pod \"infra-operator-controller-manager-68d7898b58-wqz8p\" (UID: \"c21a5c1e-b158-4adf-bc18-818df3862825\") " pod="openstack-operators/infra-operator-controller-manager-68d7898b58-wqz8p" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 
06:30:06.862610 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c21a5c1e-b158-4adf-bc18-818df3862825-apiservice-cert\") pod \"infra-operator-controller-manager-68d7898b58-wqz8p\" (UID: \"c21a5c1e-b158-4adf-bc18-818df3862825\") " pod="openstack-operators/infra-operator-controller-manager-68d7898b58-wqz8p" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.862658 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c21a5c1e-b158-4adf-bc18-818df3862825-webhook-cert\") pod \"infra-operator-controller-manager-68d7898b58-wqz8p\" (UID: \"c21a5c1e-b158-4adf-bc18-818df3862825\") " pod="openstack-operators/infra-operator-controller-manager-68d7898b58-wqz8p" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.866398 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c21a5c1e-b158-4adf-bc18-818df3862825-apiservice-cert\") pod \"infra-operator-controller-manager-68d7898b58-wqz8p\" (UID: \"c21a5c1e-b158-4adf-bc18-818df3862825\") " pod="openstack-operators/infra-operator-controller-manager-68d7898b58-wqz8p" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.866996 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c21a5c1e-b158-4adf-bc18-818df3862825-webhook-cert\") pod \"infra-operator-controller-manager-68d7898b58-wqz8p\" (UID: \"c21a5c1e-b158-4adf-bc18-818df3862825\") " pod="openstack-operators/infra-operator-controller-manager-68d7898b58-wqz8p" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.883630 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftll4\" (UniqueName: \"kubernetes.io/projected/c21a5c1e-b158-4adf-bc18-818df3862825-kube-api-access-ftll4\") pod \"infra-operator-controller-manager-68d7898b58-wqz8p\" (UID: \"c21a5c1e-b158-4adf-bc18-818df3862825\") " pod="openstack-operators/infra-operator-controller-manager-68d7898b58-wqz8p" Oct 01 06:30:06 crc kubenswrapper[4747]: I1001 06:30:06.932952 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-68d7898b58-wqz8p" Oct 01 06:30:07 crc kubenswrapper[4747]: I1001 06:30:07.324375 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-68d7898b58-wqz8p"] Oct 01 06:30:08 crc kubenswrapper[4747]: I1001 06:30:08.199883 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-68d7898b58-wqz8p" event={"ID":"c21a5c1e-b158-4adf-bc18-818df3862825","Type":"ContainerStarted","Data":"2c6a7f06f85f4cb48ae5efb5b85014eeeeef882cca86668415a76803e822efe1"} Oct 01 06:30:10 crc kubenswrapper[4747]: I1001 06:30:10.214312 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-68d7898b58-wqz8p" event={"ID":"c21a5c1e-b158-4adf-bc18-818df3862825","Type":"ContainerStarted","Data":"26d6e88e77da47ae3d936f3228008281a3c2823b336ebecf1dc7f4ed9fa00ed2"} Oct 01 06:30:10 crc kubenswrapper[4747]: I1001 06:30:10.214682 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-68d7898b58-wqz8p" Oct 01 06:30:10 crc kubenswrapper[4747]: I1001 06:30:10.214704 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-68d7898b58-wqz8p" event={"ID":"c21a5c1e-b158-4adf-bc18-818df3862825","Type":"ContainerStarted","Data":"550b72f9023272389c91aa1b2e5023bf16c0f24f41df9772f873315a9ceeb5b0"} Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.874555 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-68d7898b58-wqz8p" podStartSLOduration=4.797987405 podStartE2EDuration="6.874540712s" podCreationTimestamp="2025-10-01 06:30:06 +0000 UTC" firstStartedPulling="2025-10-01 06:30:07.343316593 +0000 UTC m=+808.752973642" lastFinishedPulling="2025-10-01 06:30:09.4198699 +0000 UTC m=+810.829526949" observedRunningTime="2025-10-01 06:30:10.255319808 +0000 UTC m=+811.664976937" watchObservedRunningTime="2025-10-01 06:30:12.874540712 +0000 UTC m=+814.284197751" Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.876738 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/openstack-galera-0"] Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.877594 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.879563 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"galera-openstack-dockercfg-ttt8c" Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.879718 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"openshift-service-ca.crt" Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.879841 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"osp-secret" Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.879896 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"openstack-scripts" Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.881074 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"openstack-config-data" Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.881603 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"kube-root-ca.crt" Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.896168 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/openstack-galera-2"] Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.897258 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.901272 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/openstack-galera-1"] Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.902806 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.904714 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstack-galera-0"] Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.928546 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstack-galera-2"] Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.931713 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstack-galera-1"] Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.940604 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.940678 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97ce40d4-b67f-474b-8e9f-9657e253305d-operator-scripts\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.940732 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/97ce40d4-b67f-474b-8e9f-9657e253305d-kolla-config\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.940801 4747 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/97ce40d4-b67f-474b-8e9f-9657e253305d-config-data-generated\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.940825 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hj6w2\" (UniqueName: \"kubernetes.io/projected/97ce40d4-b67f-474b-8e9f-9657e253305d-kube-api-access-hj6w2\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.940848 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/97ce40d4-b67f-474b-8e9f-9657e253305d-secrets\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:12 crc kubenswrapper[4747]: I1001 06:30:12.940892 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/97ce40d4-b67f-474b-8e9f-9657e253305d-config-data-default\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.041757 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.041997 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81b25fd7-ceb6-4b9f-9398-ac38129304a0-operator-scripts\") pod \"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.042083 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97ce40d4-b67f-474b-8e9f-9657e253305d-operator-scripts\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.042169 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/81b25fd7-ceb6-4b9f-9398-ac38129304a0-kolla-config\") pod \"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.042242 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.042342 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vhl7\" (UniqueName: \"kubernetes.io/projected/7fe827ea-ce04-449a-8a2c-5a99a3d76343-kube-api-access-2vhl7\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.042181 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") device mount path \"/mnt/openstack/pv07\"" pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.042409 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/7fe827ea-ce04-449a-8a2c-5a99a3d76343-config-data-default\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.042539 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8589\" (UniqueName: \"kubernetes.io/projected/81b25fd7-ceb6-4b9f-9398-ac38129304a0-kube-api-access-f8589\") pod \"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.042597 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/97ce40d4-b67f-474b-8e9f-9657e253305d-kolla-config\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.042659 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7fe827ea-ce04-449a-8a2c-5a99a3d76343-kolla-config\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.042727 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.042778 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/97ce40d4-b67f-474b-8e9f-9657e253305d-config-data-generated\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.042800 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/7fe827ea-ce04-449a-8a2c-5a99a3d76343-secrets\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.042828 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-hj6w2\" (UniqueName: \"kubernetes.io/projected/97ce40d4-b67f-474b-8e9f-9657e253305d-kube-api-access-hj6w2\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.042850 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/81b25fd7-ceb6-4b9f-9398-ac38129304a0-config-data-default\") pod \"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.042865 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/7fe827ea-ce04-449a-8a2c-5a99a3d76343-config-data-generated\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.042894 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fe827ea-ce04-449a-8a2c-5a99a3d76343-operator-scripts\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.042919 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/97ce40d4-b67f-474b-8e9f-9657e253305d-secrets\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.042937 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/81b25fd7-ceb6-4b9f-9398-ac38129304a0-config-data-generated\") pod \"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.042952 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/81b25fd7-ceb6-4b9f-9398-ac38129304a0-secrets\") pod \"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.043198 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/97ce40d4-b67f-474b-8e9f-9657e253305d-config-data-default\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.043270 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/97ce40d4-b67f-474b-8e9f-9657e253305d-config-data-generated\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.043376 4747 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/97ce40d4-b67f-474b-8e9f-9657e253305d-kolla-config\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.043948 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/97ce40d4-b67f-474b-8e9f-9657e253305d-config-data-default\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.044301 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/97ce40d4-b67f-474b-8e9f-9657e253305d-operator-scripts\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.051336 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/97ce40d4-b67f-474b-8e9f-9657e253305d-secrets\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.061203 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hj6w2\" (UniqueName: \"kubernetes.io/projected/97ce40d4-b67f-474b-8e9f-9657e253305d-kube-api-access-hj6w2\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.062139 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-galera-0\" (UID: \"97ce40d4-b67f-474b-8e9f-9657e253305d\") " pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.144787 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/81b25fd7-ceb6-4b9f-9398-ac38129304a0-config-data-default\") pod \"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.144820 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/7fe827ea-ce04-449a-8a2c-5a99a3d76343-config-data-generated\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.144845 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fe827ea-ce04-449a-8a2c-5a99a3d76343-operator-scripts\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.144866 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/81b25fd7-ceb6-4b9f-9398-ac38129304a0-config-data-generated\") pod 
\"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.144880 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/81b25fd7-ceb6-4b9f-9398-ac38129304a0-secrets\") pod \"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.144904 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81b25fd7-ceb6-4b9f-9398-ac38129304a0-operator-scripts\") pod \"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.144936 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/81b25fd7-ceb6-4b9f-9398-ac38129304a0-kolla-config\") pod \"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.144954 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.144969 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vhl7\" (UniqueName: \"kubernetes.io/projected/7fe827ea-ce04-449a-8a2c-5a99a3d76343-kube-api-access-2vhl7\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.144985 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/7fe827ea-ce04-449a-8a2c-5a99a3d76343-config-data-default\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.145004 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8589\" (UniqueName: \"kubernetes.io/projected/81b25fd7-ceb6-4b9f-9398-ac38129304a0-kube-api-access-f8589\") pod \"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.145356 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/7fe827ea-ce04-449a-8a2c-5a99a3d76343-config-data-generated\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.146000 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") device mount path \"/mnt/openstack/pv08\"" pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 
crc kubenswrapper[4747]: I1001 06:30:13.146040 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/81b25fd7-ceb6-4b9f-9398-ac38129304a0-config-data-default\") pod \"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.146089 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7fe827ea-ce04-449a-8a2c-5a99a3d76343-kolla-config\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.146286 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/81b25fd7-ceb6-4b9f-9398-ac38129304a0-config-data-generated\") pod \"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.146364 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") device mount path \"/mnt/openstack/pv12\"" pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.146619 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/7fe827ea-ce04-449a-8a2c-5a99a3d76343-config-data-default\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.146651 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7fe827ea-ce04-449a-8a2c-5a99a3d76343-kolla-config\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.146127 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.146703 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/7fe827ea-ce04-449a-8a2c-5a99a3d76343-secrets\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.146853 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/81b25fd7-ceb6-4b9f-9398-ac38129304a0-kolla-config\") pod \"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.147075 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/7fe827ea-ce04-449a-8a2c-5a99a3d76343-operator-scripts\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.147171 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81b25fd7-ceb6-4b9f-9398-ac38129304a0-operator-scripts\") pod \"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.153482 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/81b25fd7-ceb6-4b9f-9398-ac38129304a0-secrets\") pod \"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.155597 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/7fe827ea-ce04-449a-8a2c-5a99a3d76343-secrets\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.169880 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.171292 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vhl7\" (UniqueName: \"kubernetes.io/projected/7fe827ea-ce04-449a-8a2c-5a99a3d76343-kube-api-access-2vhl7\") pod \"openstack-galera-2\" (UID: \"7fe827ea-ce04-449a-8a2c-5a99a3d76343\") " pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.171305 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8589\" (UniqueName: \"kubernetes.io/projected/81b25fd7-ceb6-4b9f-9398-ac38129304a0-kube-api-access-f8589\") pod \"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.174807 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-1\" (UID: \"81b25fd7-ceb6-4b9f-9398-ac38129304a0\") " pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.194410 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.210221 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.217449 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.615419 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstack-galera-0"] Oct 01 06:30:13 crc kubenswrapper[4747]: W1001 06:30:13.624738 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97ce40d4_b67f_474b_8e9f_9657e253305d.slice/crio-6f94bb3c4866062ca13719e399e558925758a2c2fa88ea1b0d43ebcb983d64a8 WatchSource:0}: Error finding container 6f94bb3c4866062ca13719e399e558925758a2c2fa88ea1b0d43ebcb983d64a8: Status 404 returned error can't find the container with id 6f94bb3c4866062ca13719e399e558925758a2c2fa88ea1b0d43ebcb983d64a8 Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.662985 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstack-galera-2"] Oct 01 06:30:13 crc kubenswrapper[4747]: W1001 06:30:13.671866 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7fe827ea_ce04_449a_8a2c_5a99a3d76343.slice/crio-840c5053bc9f93fe25b5ba9e95bb5974f6db73c73066879bba47bcfcbc3de16a WatchSource:0}: Error finding container 840c5053bc9f93fe25b5ba9e95bb5974f6db73c73066879bba47bcfcbc3de16a: Status 404 returned error can't find the container with id 840c5053bc9f93fe25b5ba9e95bb5974f6db73c73066879bba47bcfcbc3de16a Oct 01 06:30:13 crc kubenswrapper[4747]: I1001 06:30:13.675863 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstack-galera-1"] Oct 01 06:30:14 crc kubenswrapper[4747]: I1001 06:30:14.244694 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-1" event={"ID":"81b25fd7-ceb6-4b9f-9398-ac38129304a0","Type":"ContainerStarted","Data":"d91a397c91e61c506b16a6ae9034e3b04237d3e9117efc9c779a02de03ec40de"} Oct 01 06:30:14 crc kubenswrapper[4747]: I1001 06:30:14.249381 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-2" event={"ID":"7fe827ea-ce04-449a-8a2c-5a99a3d76343","Type":"ContainerStarted","Data":"840c5053bc9f93fe25b5ba9e95bb5974f6db73c73066879bba47bcfcbc3de16a"} Oct 01 06:30:14 crc kubenswrapper[4747]: I1001 06:30:14.252919 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-0" event={"ID":"97ce40d4-b67f-474b-8e9f-9657e253305d","Type":"ContainerStarted","Data":"6f94bb3c4866062ca13719e399e558925758a2c2fa88ea1b0d43ebcb983d64a8"} Oct 01 06:30:16 crc kubenswrapper[4747]: I1001 06:30:16.938146 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-68d7898b58-wqz8p" Oct 01 06:30:21 crc kubenswrapper[4747]: I1001 06:30:21.312428 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7z6gv"] Oct 01 06:30:21 crc kubenswrapper[4747]: I1001 06:30:21.315546 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7z6gv" Oct 01 06:30:21 crc kubenswrapper[4747]: I1001 06:30:21.328984 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7z6gv"] Oct 01 06:30:21 crc kubenswrapper[4747]: I1001 06:30:21.469312 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6svh\" (UniqueName: \"kubernetes.io/projected/8e71490b-81c7-4448-ae87-bbed0110efcd-kube-api-access-r6svh\") pod \"redhat-operators-7z6gv\" (UID: \"8e71490b-81c7-4448-ae87-bbed0110efcd\") " pod="openshift-marketplace/redhat-operators-7z6gv" Oct 01 06:30:21 crc kubenswrapper[4747]: I1001 06:30:21.469379 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e71490b-81c7-4448-ae87-bbed0110efcd-catalog-content\") pod \"redhat-operators-7z6gv\" (UID: \"8e71490b-81c7-4448-ae87-bbed0110efcd\") " pod="openshift-marketplace/redhat-operators-7z6gv" Oct 01 06:30:21 crc kubenswrapper[4747]: I1001 06:30:21.469399 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e71490b-81c7-4448-ae87-bbed0110efcd-utilities\") pod \"redhat-operators-7z6gv\" (UID: \"8e71490b-81c7-4448-ae87-bbed0110efcd\") " pod="openshift-marketplace/redhat-operators-7z6gv" Oct 01 06:30:21 crc kubenswrapper[4747]: I1001 06:30:21.570895 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6svh\" (UniqueName: \"kubernetes.io/projected/8e71490b-81c7-4448-ae87-bbed0110efcd-kube-api-access-r6svh\") pod \"redhat-operators-7z6gv\" (UID: \"8e71490b-81c7-4448-ae87-bbed0110efcd\") " pod="openshift-marketplace/redhat-operators-7z6gv" Oct 01 06:30:21 crc kubenswrapper[4747]: I1001 06:30:21.571425 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e71490b-81c7-4448-ae87-bbed0110efcd-catalog-content\") pod \"redhat-operators-7z6gv\" (UID: \"8e71490b-81c7-4448-ae87-bbed0110efcd\") " pod="openshift-marketplace/redhat-operators-7z6gv" Oct 01 06:30:21 crc kubenswrapper[4747]: I1001 06:30:21.571483 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e71490b-81c7-4448-ae87-bbed0110efcd-utilities\") pod \"redhat-operators-7z6gv\" (UID: \"8e71490b-81c7-4448-ae87-bbed0110efcd\") " pod="openshift-marketplace/redhat-operators-7z6gv" Oct 01 06:30:21 crc kubenswrapper[4747]: I1001 06:30:21.572011 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e71490b-81c7-4448-ae87-bbed0110efcd-catalog-content\") pod \"redhat-operators-7z6gv\" (UID: \"8e71490b-81c7-4448-ae87-bbed0110efcd\") " pod="openshift-marketplace/redhat-operators-7z6gv" Oct 01 06:30:21 crc kubenswrapper[4747]: I1001 06:30:21.572275 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e71490b-81c7-4448-ae87-bbed0110efcd-utilities\") pod \"redhat-operators-7z6gv\" (UID: \"8e71490b-81c7-4448-ae87-bbed0110efcd\") " pod="openshift-marketplace/redhat-operators-7z6gv" Oct 01 06:30:21 crc kubenswrapper[4747]: I1001 06:30:21.591416 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-r6svh\" (UniqueName: \"kubernetes.io/projected/8e71490b-81c7-4448-ae87-bbed0110efcd-kube-api-access-r6svh\") pod \"redhat-operators-7z6gv\" (UID: \"8e71490b-81c7-4448-ae87-bbed0110efcd\") " pod="openshift-marketplace/redhat-operators-7z6gv" Oct 01 06:30:21 crc kubenswrapper[4747]: I1001 06:30:21.651104 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7z6gv" Oct 01 06:30:22 crc kubenswrapper[4747]: I1001 06:30:22.064216 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7z6gv"] Oct 01 06:30:22 crc kubenswrapper[4747]: I1001 06:30:22.319690 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-2" event={"ID":"7fe827ea-ce04-449a-8a2c-5a99a3d76343","Type":"ContainerStarted","Data":"a2aee18c46a9862c29aac218d8efae3c510f7bbead17150add9d1cfd36ec4336"} Oct 01 06:30:22 crc kubenswrapper[4747]: I1001 06:30:22.321545 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-0" event={"ID":"97ce40d4-b67f-474b-8e9f-9657e253305d","Type":"ContainerStarted","Data":"0bf4cb38627f6ca343c8b21335d30d89fcc7b186463ee744ad9d9312a25653d8"} Oct 01 06:30:22 crc kubenswrapper[4747]: I1001 06:30:22.323224 4747 generic.go:334] "Generic (PLEG): container finished" podID="8e71490b-81c7-4448-ae87-bbed0110efcd" containerID="cbac271c9744ec1bd0059472d0a532ae1652bcea501dfd9df78eece825c67fb7" exitCode=0 Oct 01 06:30:22 crc kubenswrapper[4747]: I1001 06:30:22.323282 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7z6gv" event={"ID":"8e71490b-81c7-4448-ae87-bbed0110efcd","Type":"ContainerDied","Data":"cbac271c9744ec1bd0059472d0a532ae1652bcea501dfd9df78eece825c67fb7"} Oct 01 06:30:22 crc kubenswrapper[4747]: I1001 06:30:22.323298 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7z6gv" event={"ID":"8e71490b-81c7-4448-ae87-bbed0110efcd","Type":"ContainerStarted","Data":"bdea363a949a2f4c964e25e783c239357a5b8cb9ef861efaa80ca448fab98687"} Oct 01 06:30:22 crc kubenswrapper[4747]: I1001 06:30:22.324702 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-1" event={"ID":"81b25fd7-ceb6-4b9f-9398-ac38129304a0","Type":"ContainerStarted","Data":"f4ff193186f20c80077ade95a5bf7f760171e000fba2ddf7168d78678db1d3a4"} Oct 01 06:30:24 crc kubenswrapper[4747]: I1001 06:30:24.106762 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-5ts8w"] Oct 01 06:30:24 crc kubenswrapper[4747]: I1001 06:30:24.107651 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-5ts8w" Oct 01 06:30:24 crc kubenswrapper[4747]: I1001 06:30:24.110318 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-index-dockercfg-72chq" Oct 01 06:30:24 crc kubenswrapper[4747]: I1001 06:30:24.128848 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-5ts8w"] Oct 01 06:30:24 crc kubenswrapper[4747]: I1001 06:30:24.203504 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlcxw\" (UniqueName: \"kubernetes.io/projected/2def1300-5b93-411b-b6ef-da3b1365e726-kube-api-access-tlcxw\") pod \"rabbitmq-cluster-operator-index-5ts8w\" (UID: \"2def1300-5b93-411b-b6ef-da3b1365e726\") " pod="openstack-operators/rabbitmq-cluster-operator-index-5ts8w" Oct 01 06:30:24 crc kubenswrapper[4747]: I1001 06:30:24.305352 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlcxw\" (UniqueName: \"kubernetes.io/projected/2def1300-5b93-411b-b6ef-da3b1365e726-kube-api-access-tlcxw\") pod \"rabbitmq-cluster-operator-index-5ts8w\" (UID: \"2def1300-5b93-411b-b6ef-da3b1365e726\") " pod="openstack-operators/rabbitmq-cluster-operator-index-5ts8w" Oct 01 06:30:24 crc kubenswrapper[4747]: I1001 06:30:24.328042 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlcxw\" (UniqueName: \"kubernetes.io/projected/2def1300-5b93-411b-b6ef-da3b1365e726-kube-api-access-tlcxw\") pod \"rabbitmq-cluster-operator-index-5ts8w\" (UID: \"2def1300-5b93-411b-b6ef-da3b1365e726\") " pod="openstack-operators/rabbitmq-cluster-operator-index-5ts8w" Oct 01 06:30:24 crc kubenswrapper[4747]: I1001 06:30:24.452265 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-5ts8w" Oct 01 06:30:24 crc kubenswrapper[4747]: I1001 06:30:24.874003 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-5ts8w"] Oct 01 06:30:24 crc kubenswrapper[4747]: W1001 06:30:24.889407 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2def1300_5b93_411b_b6ef_da3b1365e726.slice/crio-e7eb238f7b1fe461131a77037e215492feb7d2735ef9e3099325f60d17ae02d4 WatchSource:0}: Error finding container e7eb238f7b1fe461131a77037e215492feb7d2735ef9e3099325f60d17ae02d4: Status 404 returned error can't find the container with id e7eb238f7b1fe461131a77037e215492feb7d2735ef9e3099325f60d17ae02d4 Oct 01 06:30:25 crc kubenswrapper[4747]: I1001 06:30:25.349555 4747 generic.go:334] "Generic (PLEG): container finished" podID="8e71490b-81c7-4448-ae87-bbed0110efcd" containerID="f8e104281eb97c12643a867b34e97b1ccec87deff7139c44ad11d54ad527b762" exitCode=0 Oct 01 06:30:25 crc kubenswrapper[4747]: I1001 06:30:25.349642 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7z6gv" event={"ID":"8e71490b-81c7-4448-ae87-bbed0110efcd","Type":"ContainerDied","Data":"f8e104281eb97c12643a867b34e97b1ccec87deff7139c44ad11d54ad527b762"} Oct 01 06:30:25 crc kubenswrapper[4747]: I1001 06:30:25.352571 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-5ts8w" event={"ID":"2def1300-5b93-411b-b6ef-da3b1365e726","Type":"ContainerStarted","Data":"e7eb238f7b1fe461131a77037e215492feb7d2735ef9e3099325f60d17ae02d4"} Oct 01 06:30:25 crc kubenswrapper[4747]: I1001 06:30:25.427359 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/memcached-0"] Oct 01 06:30:25 crc kubenswrapper[4747]: I1001 06:30:25.428224 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/memcached-0" Oct 01 06:30:25 crc kubenswrapper[4747]: I1001 06:30:25.433718 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"memcached-config-data" Oct 01 06:30:25 crc kubenswrapper[4747]: I1001 06:30:25.434108 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"memcached-memcached-dockercfg-mtkcv" Oct 01 06:30:25 crc kubenswrapper[4747]: I1001 06:30:25.448131 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/memcached-0"] Oct 01 06:30:25 crc kubenswrapper[4747]: I1001 06:30:25.517714 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mwkz\" (UniqueName: \"kubernetes.io/projected/cca7231f-4324-4f02-8d14-f08c4b7382e3-kube-api-access-9mwkz\") pod \"memcached-0\" (UID: \"cca7231f-4324-4f02-8d14-f08c4b7382e3\") " pod="glance-kuttl-tests/memcached-0" Oct 01 06:30:25 crc kubenswrapper[4747]: I1001 06:30:25.517789 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/cca7231f-4324-4f02-8d14-f08c4b7382e3-kolla-config\") pod \"memcached-0\" (UID: \"cca7231f-4324-4f02-8d14-f08c4b7382e3\") " pod="glance-kuttl-tests/memcached-0" Oct 01 06:30:25 crc kubenswrapper[4747]: I1001 06:30:25.517852 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cca7231f-4324-4f02-8d14-f08c4b7382e3-config-data\") pod \"memcached-0\" (UID: \"cca7231f-4324-4f02-8d14-f08c4b7382e3\") " pod="glance-kuttl-tests/memcached-0" Oct 01 06:30:25 crc kubenswrapper[4747]: I1001 06:30:25.619402 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cca7231f-4324-4f02-8d14-f08c4b7382e3-config-data\") pod \"memcached-0\" (UID: \"cca7231f-4324-4f02-8d14-f08c4b7382e3\") " pod="glance-kuttl-tests/memcached-0" Oct 01 06:30:25 crc kubenswrapper[4747]: I1001 06:30:25.619477 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mwkz\" (UniqueName: \"kubernetes.io/projected/cca7231f-4324-4f02-8d14-f08c4b7382e3-kube-api-access-9mwkz\") pod \"memcached-0\" (UID: \"cca7231f-4324-4f02-8d14-f08c4b7382e3\") " pod="glance-kuttl-tests/memcached-0" Oct 01 06:30:25 crc kubenswrapper[4747]: I1001 06:30:25.619513 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/cca7231f-4324-4f02-8d14-f08c4b7382e3-kolla-config\") pod \"memcached-0\" (UID: \"cca7231f-4324-4f02-8d14-f08c4b7382e3\") " pod="glance-kuttl-tests/memcached-0" Oct 01 06:30:25 crc kubenswrapper[4747]: I1001 06:30:25.620702 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/cca7231f-4324-4f02-8d14-f08c4b7382e3-kolla-config\") pod \"memcached-0\" (UID: \"cca7231f-4324-4f02-8d14-f08c4b7382e3\") " pod="glance-kuttl-tests/memcached-0" Oct 01 06:30:25 crc kubenswrapper[4747]: I1001 06:30:25.620733 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/cca7231f-4324-4f02-8d14-f08c4b7382e3-config-data\") pod \"memcached-0\" (UID: \"cca7231f-4324-4f02-8d14-f08c4b7382e3\") " pod="glance-kuttl-tests/memcached-0" Oct 01 06:30:25 
crc kubenswrapper[4747]: I1001 06:30:25.637510 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mwkz\" (UniqueName: \"kubernetes.io/projected/cca7231f-4324-4f02-8d14-f08c4b7382e3-kube-api-access-9mwkz\") pod \"memcached-0\" (UID: \"cca7231f-4324-4f02-8d14-f08c4b7382e3\") " pod="glance-kuttl-tests/memcached-0" Oct 01 06:30:25 crc kubenswrapper[4747]: I1001 06:30:25.743297 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/memcached-0" Oct 01 06:30:26 crc kubenswrapper[4747]: I1001 06:30:26.105461 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/memcached-0"] Oct 01 06:30:26 crc kubenswrapper[4747]: W1001 06:30:26.175506 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcca7231f_4324_4f02_8d14_f08c4b7382e3.slice/crio-bb5c0da5ab0b87fc39cf5c54fc9533964799f80fb96b3174e3c1a24cfaa1699f WatchSource:0}: Error finding container bb5c0da5ab0b87fc39cf5c54fc9533964799f80fb96b3174e3c1a24cfaa1699f: Status 404 returned error can't find the container with id bb5c0da5ab0b87fc39cf5c54fc9533964799f80fb96b3174e3c1a24cfaa1699f Oct 01 06:30:26 crc kubenswrapper[4747]: I1001 06:30:26.359715 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/memcached-0" event={"ID":"cca7231f-4324-4f02-8d14-f08c4b7382e3","Type":"ContainerStarted","Data":"bb5c0da5ab0b87fc39cf5c54fc9533964799f80fb96b3174e3c1a24cfaa1699f"} Oct 01 06:30:26 crc kubenswrapper[4747]: I1001 06:30:26.362162 4747 generic.go:334] "Generic (PLEG): container finished" podID="7fe827ea-ce04-449a-8a2c-5a99a3d76343" containerID="a2aee18c46a9862c29aac218d8efae3c510f7bbead17150add9d1cfd36ec4336" exitCode=0 Oct 01 06:30:26 crc kubenswrapper[4747]: I1001 06:30:26.362235 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-2" event={"ID":"7fe827ea-ce04-449a-8a2c-5a99a3d76343","Type":"ContainerDied","Data":"a2aee18c46a9862c29aac218d8efae3c510f7bbead17150add9d1cfd36ec4336"} Oct 01 06:30:26 crc kubenswrapper[4747]: I1001 06:30:26.364537 4747 generic.go:334] "Generic (PLEG): container finished" podID="97ce40d4-b67f-474b-8e9f-9657e253305d" containerID="0bf4cb38627f6ca343c8b21335d30d89fcc7b186463ee744ad9d9312a25653d8" exitCode=0 Oct 01 06:30:26 crc kubenswrapper[4747]: I1001 06:30:26.364603 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-0" event={"ID":"97ce40d4-b67f-474b-8e9f-9657e253305d","Type":"ContainerDied","Data":"0bf4cb38627f6ca343c8b21335d30d89fcc7b186463ee744ad9d9312a25653d8"} Oct 01 06:30:26 crc kubenswrapper[4747]: I1001 06:30:26.366848 4747 generic.go:334] "Generic (PLEG): container finished" podID="81b25fd7-ceb6-4b9f-9398-ac38129304a0" containerID="f4ff193186f20c80077ade95a5bf7f760171e000fba2ddf7168d78678db1d3a4" exitCode=0 Oct 01 06:30:26 crc kubenswrapper[4747]: I1001 06:30:26.366868 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-1" event={"ID":"81b25fd7-ceb6-4b9f-9398-ac38129304a0","Type":"ContainerDied","Data":"f4ff193186f20c80077ade95a5bf7f760171e000fba2ddf7168d78678db1d3a4"} Oct 01 06:30:27 crc kubenswrapper[4747]: I1001 06:30:27.375865 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7z6gv" 
event={"ID":"8e71490b-81c7-4448-ae87-bbed0110efcd","Type":"ContainerStarted","Data":"519cd866ccfe8c23c31e9dc2058653c9380fee11699f5a5326af88cdf382c5a0"} Oct 01 06:30:27 crc kubenswrapper[4747]: I1001 06:30:27.394729 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7z6gv" podStartSLOduration=2.512709998 podStartE2EDuration="6.394711605s" podCreationTimestamp="2025-10-01 06:30:21 +0000 UTC" firstStartedPulling="2025-10-01 06:30:22.325594637 +0000 UTC m=+823.735251686" lastFinishedPulling="2025-10-01 06:30:26.207596234 +0000 UTC m=+827.617253293" observedRunningTime="2025-10-01 06:30:27.391582186 +0000 UTC m=+828.801239245" watchObservedRunningTime="2025-10-01 06:30:27.394711605 +0000 UTC m=+828.804368664" Oct 01 06:30:29 crc kubenswrapper[4747]: I1001 06:30:29.393637 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-5ts8w" event={"ID":"2def1300-5b93-411b-b6ef-da3b1365e726","Type":"ContainerStarted","Data":"28a110bdf48346cef397f898556d8e2102e46bb35b873ebdee439d9ac850e60b"} Oct 01 06:30:29 crc kubenswrapper[4747]: I1001 06:30:29.396461 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-1" event={"ID":"81b25fd7-ceb6-4b9f-9398-ac38129304a0","Type":"ContainerStarted","Data":"ffc2a2125f066d1f7109626dc68e82bcc530074b5b28e4a07b936d9654a6aec4"} Oct 01 06:30:29 crc kubenswrapper[4747]: I1001 06:30:29.398047 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/memcached-0" event={"ID":"cca7231f-4324-4f02-8d14-f08c4b7382e3","Type":"ContainerStarted","Data":"1267a71f615c3991b55a7e4acab22630391906fda530ffed5090fa895b41208e"} Oct 01 06:30:29 crc kubenswrapper[4747]: I1001 06:30:29.398203 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/memcached-0" Oct 01 06:30:29 crc kubenswrapper[4747]: I1001 06:30:29.400319 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-2" event={"ID":"7fe827ea-ce04-449a-8a2c-5a99a3d76343","Type":"ContainerStarted","Data":"5dd10f5941243c21f9121d122fd24e2bd7dc3bc37d878d7f70612053acf2454b"} Oct 01 06:30:29 crc kubenswrapper[4747]: I1001 06:30:29.402700 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstack-galera-0" event={"ID":"97ce40d4-b67f-474b-8e9f-9657e253305d","Type":"ContainerStarted","Data":"b5aadfdb85b2a661aa270bdb2f3ab128dc8a79e7dabbdfc7fa80d69f8a415f29"} Oct 01 06:30:29 crc kubenswrapper[4747]: I1001 06:30:29.415918 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-5ts8w" podStartSLOduration=1.638047423 podStartE2EDuration="5.41589587s" podCreationTimestamp="2025-10-01 06:30:24 +0000 UTC" firstStartedPulling="2025-10-01 06:30:24.89150021 +0000 UTC m=+826.301157259" lastFinishedPulling="2025-10-01 06:30:28.669348657 +0000 UTC m=+830.079005706" observedRunningTime="2025-10-01 06:30:29.411351636 +0000 UTC m=+830.821008725" watchObservedRunningTime="2025-10-01 06:30:29.41589587 +0000 UTC m=+830.825552969" Oct 01 06:30:29 crc kubenswrapper[4747]: I1001 06:30:29.435712 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/openstack-galera-2" podStartSLOduration=10.716387898 podStartE2EDuration="18.435694007s" podCreationTimestamp="2025-10-01 06:30:11 +0000 UTC" firstStartedPulling="2025-10-01 06:30:13.675317618 +0000 UTC m=+815.084974707" 
lastFinishedPulling="2025-10-01 06:30:21.394623757 +0000 UTC m=+822.804280816" observedRunningTime="2025-10-01 06:30:29.433514653 +0000 UTC m=+830.843171722" watchObservedRunningTime="2025-10-01 06:30:29.435694007 +0000 UTC m=+830.845351096" Oct 01 06:30:29 crc kubenswrapper[4747]: I1001 06:30:29.464600 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/memcached-0" podStartSLOduration=1.980764956 podStartE2EDuration="4.464576914s" podCreationTimestamp="2025-10-01 06:30:25 +0000 UTC" firstStartedPulling="2025-10-01 06:30:26.178074061 +0000 UTC m=+827.587731120" lastFinishedPulling="2025-10-01 06:30:28.661886029 +0000 UTC m=+830.071543078" observedRunningTime="2025-10-01 06:30:29.453338761 +0000 UTC m=+830.862995820" watchObservedRunningTime="2025-10-01 06:30:29.464576914 +0000 UTC m=+830.874233973" Oct 01 06:30:29 crc kubenswrapper[4747]: I1001 06:30:29.476428 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/openstack-galera-0" podStartSLOduration=10.749969733 podStartE2EDuration="18.476410281s" podCreationTimestamp="2025-10-01 06:30:11 +0000 UTC" firstStartedPulling="2025-10-01 06:30:13.627634179 +0000 UTC m=+815.037291218" lastFinishedPulling="2025-10-01 06:30:21.354074707 +0000 UTC m=+822.763731766" observedRunningTime="2025-10-01 06:30:29.470898833 +0000 UTC m=+830.880555892" watchObservedRunningTime="2025-10-01 06:30:29.476410281 +0000 UTC m=+830.886067340" Oct 01 06:30:29 crc kubenswrapper[4747]: I1001 06:30:29.491568 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/openstack-galera-1" podStartSLOduration=10.733028406 podStartE2EDuration="18.491550482s" podCreationTimestamp="2025-10-01 06:30:11 +0000 UTC" firstStartedPulling="2025-10-01 06:30:13.687511184 +0000 UTC m=+815.097168263" lastFinishedPulling="2025-10-01 06:30:21.44603329 +0000 UTC m=+822.855690339" observedRunningTime="2025-10-01 06:30:29.490512316 +0000 UTC m=+830.900169365" watchObservedRunningTime="2025-10-01 06:30:29.491550482 +0000 UTC m=+830.901207521" Oct 01 06:30:30 crc kubenswrapper[4747]: I1001 06:30:30.111428 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-5ts8w"] Oct 01 06:30:30 crc kubenswrapper[4747]: I1001 06:30:30.902458 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-czqvp"] Oct 01 06:30:30 crc kubenswrapper[4747]: I1001 06:30:30.903171 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-czqvp" Oct 01 06:30:30 crc kubenswrapper[4747]: I1001 06:30:30.946268 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-czqvp"] Oct 01 06:30:31 crc kubenswrapper[4747]: I1001 06:30:31.002577 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dd8x2\" (UniqueName: \"kubernetes.io/projected/ce3e3ea5-74d4-4cfb-b5d3-2dd3861d9c86-kube-api-access-dd8x2\") pod \"rabbitmq-cluster-operator-index-czqvp\" (UID: \"ce3e3ea5-74d4-4cfb-b5d3-2dd3861d9c86\") " pod="openstack-operators/rabbitmq-cluster-operator-index-czqvp" Oct 01 06:30:31 crc kubenswrapper[4747]: I1001 06:30:31.104571 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dd8x2\" (UniqueName: \"kubernetes.io/projected/ce3e3ea5-74d4-4cfb-b5d3-2dd3861d9c86-kube-api-access-dd8x2\") pod \"rabbitmq-cluster-operator-index-czqvp\" (UID: \"ce3e3ea5-74d4-4cfb-b5d3-2dd3861d9c86\") " pod="openstack-operators/rabbitmq-cluster-operator-index-czqvp" Oct 01 06:30:31 crc kubenswrapper[4747]: I1001 06:30:31.125808 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dd8x2\" (UniqueName: \"kubernetes.io/projected/ce3e3ea5-74d4-4cfb-b5d3-2dd3861d9c86-kube-api-access-dd8x2\") pod \"rabbitmq-cluster-operator-index-czqvp\" (UID: \"ce3e3ea5-74d4-4cfb-b5d3-2dd3861d9c86\") " pod="openstack-operators/rabbitmq-cluster-operator-index-czqvp" Oct 01 06:30:31 crc kubenswrapper[4747]: I1001 06:30:31.224171 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-czqvp" Oct 01 06:30:31 crc kubenswrapper[4747]: I1001 06:30:31.414828 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-index-5ts8w" podUID="2def1300-5b93-411b-b6ef-da3b1365e726" containerName="registry-server" containerID="cri-o://28a110bdf48346cef397f898556d8e2102e46bb35b873ebdee439d9ac850e60b" gracePeriod=2 Oct 01 06:30:31 crc kubenswrapper[4747]: I1001 06:30:31.646837 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-czqvp"] Oct 01 06:30:31 crc kubenswrapper[4747]: I1001 06:30:31.651992 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7z6gv" Oct 01 06:30:31 crc kubenswrapper[4747]: I1001 06:30:31.652054 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7z6gv" Oct 01 06:30:31 crc kubenswrapper[4747]: W1001 06:30:31.662013 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce3e3ea5_74d4_4cfb_b5d3_2dd3861d9c86.slice/crio-13141b613ce32f4b8f911d5ec753669ce9926e6b9bd5e0aec6ad8611bd52a1b7 WatchSource:0}: Error finding container 13141b613ce32f4b8f911d5ec753669ce9926e6b9bd5e0aec6ad8611bd52a1b7: Status 404 returned error can't find the container with id 13141b613ce32f4b8f911d5ec753669ce9926e6b9bd5e0aec6ad8611bd52a1b7 Oct 01 06:30:31 crc kubenswrapper[4747]: I1001 06:30:31.739170 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7z6gv" Oct 01 06:30:32 crc kubenswrapper[4747]: I1001 06:30:32.424524 4747 generic.go:334] "Generic (PLEG): container 
finished" podID="2def1300-5b93-411b-b6ef-da3b1365e726" containerID="28a110bdf48346cef397f898556d8e2102e46bb35b873ebdee439d9ac850e60b" exitCode=0 Oct 01 06:30:32 crc kubenswrapper[4747]: I1001 06:30:32.424572 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-5ts8w" event={"ID":"2def1300-5b93-411b-b6ef-da3b1365e726","Type":"ContainerDied","Data":"28a110bdf48346cef397f898556d8e2102e46bb35b873ebdee439d9ac850e60b"} Oct 01 06:30:32 crc kubenswrapper[4747]: I1001 06:30:32.424680 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-5ts8w" event={"ID":"2def1300-5b93-411b-b6ef-da3b1365e726","Type":"ContainerDied","Data":"e7eb238f7b1fe461131a77037e215492feb7d2735ef9e3099325f60d17ae02d4"} Oct 01 06:30:32 crc kubenswrapper[4747]: I1001 06:30:32.424696 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e7eb238f7b1fe461131a77037e215492feb7d2735ef9e3099325f60d17ae02d4" Oct 01 06:30:32 crc kubenswrapper[4747]: I1001 06:30:32.426335 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-czqvp" event={"ID":"ce3e3ea5-74d4-4cfb-b5d3-2dd3861d9c86","Type":"ContainerStarted","Data":"13141b613ce32f4b8f911d5ec753669ce9926e6b9bd5e0aec6ad8611bd52a1b7"} Oct 01 06:30:32 crc kubenswrapper[4747]: I1001 06:30:32.465589 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-5ts8w" Oct 01 06:30:32 crc kubenswrapper[4747]: I1001 06:30:32.493208 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7z6gv" Oct 01 06:30:32 crc kubenswrapper[4747]: I1001 06:30:32.625061 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tlcxw\" (UniqueName: \"kubernetes.io/projected/2def1300-5b93-411b-b6ef-da3b1365e726-kube-api-access-tlcxw\") pod \"2def1300-5b93-411b-b6ef-da3b1365e726\" (UID: \"2def1300-5b93-411b-b6ef-da3b1365e726\") " Oct 01 06:30:32 crc kubenswrapper[4747]: I1001 06:30:32.634131 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2def1300-5b93-411b-b6ef-da3b1365e726-kube-api-access-tlcxw" (OuterVolumeSpecName: "kube-api-access-tlcxw") pod "2def1300-5b93-411b-b6ef-da3b1365e726" (UID: "2def1300-5b93-411b-b6ef-da3b1365e726"). InnerVolumeSpecName "kube-api-access-tlcxw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:30:32 crc kubenswrapper[4747]: I1001 06:30:32.727382 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tlcxw\" (UniqueName: \"kubernetes.io/projected/2def1300-5b93-411b-b6ef-da3b1365e726-kube-api-access-tlcxw\") on node \"crc\" DevicePath \"\"" Oct 01 06:30:33 crc kubenswrapper[4747]: I1001 06:30:33.196565 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:33 crc kubenswrapper[4747]: I1001 06:30:33.196905 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:33 crc kubenswrapper[4747]: I1001 06:30:33.210873 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:33 crc kubenswrapper[4747]: I1001 06:30:33.210950 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:33 crc kubenswrapper[4747]: I1001 06:30:33.218036 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:33 crc kubenswrapper[4747]: I1001 06:30:33.218351 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:33 crc kubenswrapper[4747]: I1001 06:30:33.439142 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-czqvp" event={"ID":"ce3e3ea5-74d4-4cfb-b5d3-2dd3861d9c86","Type":"ContainerStarted","Data":"03e82be849ebfa717381c500c5e90edb96783e699c87de60c685258274f015bc"} Oct 01 06:30:33 crc kubenswrapper[4747]: I1001 06:30:33.439322 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-5ts8w" Oct 01 06:30:33 crc kubenswrapper[4747]: I1001 06:30:33.472282 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-czqvp" podStartSLOduration=2.26609309 podStartE2EDuration="3.472256061s" podCreationTimestamp="2025-10-01 06:30:30 +0000 UTC" firstStartedPulling="2025-10-01 06:30:31.666562974 +0000 UTC m=+833.076220033" lastFinishedPulling="2025-10-01 06:30:32.872725915 +0000 UTC m=+834.282383004" observedRunningTime="2025-10-01 06:30:33.46626339 +0000 UTC m=+834.875920479" watchObservedRunningTime="2025-10-01 06:30:33.472256061 +0000 UTC m=+834.881913180" Oct 01 06:30:33 crc kubenswrapper[4747]: I1001 06:30:33.496383 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-5ts8w"] Oct 01 06:30:33 crc kubenswrapper[4747]: I1001 06:30:33.505556 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-5ts8w"] Oct 01 06:30:33 crc kubenswrapper[4747]: E1001 06:30:33.874647 4747 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.51:37532->38.102.83.51:44771: write tcp 38.102.83.51:37532->38.102.83.51:44771: write: broken pipe Oct 01 06:30:33 crc kubenswrapper[4747]: I1001 06:30:33.915114 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7z6gv"] Oct 01 06:30:34 crc kubenswrapper[4747]: I1001 06:30:34.448042 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7z6gv" podUID="8e71490b-81c7-4448-ae87-bbed0110efcd" containerName="registry-server" containerID="cri-o://519cd866ccfe8c23c31e9dc2058653c9380fee11699f5a5326af88cdf382c5a0" gracePeriod=2 Oct 01 06:30:34 crc kubenswrapper[4747]: I1001 06:30:34.928285 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7z6gv" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.066736 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e71490b-81c7-4448-ae87-bbed0110efcd-catalog-content\") pod \"8e71490b-81c7-4448-ae87-bbed0110efcd\" (UID: \"8e71490b-81c7-4448-ae87-bbed0110efcd\") " Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.066820 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e71490b-81c7-4448-ae87-bbed0110efcd-utilities\") pod \"8e71490b-81c7-4448-ae87-bbed0110efcd\" (UID: \"8e71490b-81c7-4448-ae87-bbed0110efcd\") " Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.066929 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6svh\" (UniqueName: \"kubernetes.io/projected/8e71490b-81c7-4448-ae87-bbed0110efcd-kube-api-access-r6svh\") pod \"8e71490b-81c7-4448-ae87-bbed0110efcd\" (UID: \"8e71490b-81c7-4448-ae87-bbed0110efcd\") " Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.067787 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e71490b-81c7-4448-ae87-bbed0110efcd-utilities" (OuterVolumeSpecName: "utilities") pod "8e71490b-81c7-4448-ae87-bbed0110efcd" (UID: "8e71490b-81c7-4448-ae87-bbed0110efcd"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.073008 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e71490b-81c7-4448-ae87-bbed0110efcd-kube-api-access-r6svh" (OuterVolumeSpecName: "kube-api-access-r6svh") pod "8e71490b-81c7-4448-ae87-bbed0110efcd" (UID: "8e71490b-81c7-4448-ae87-bbed0110efcd"). InnerVolumeSpecName "kube-api-access-r6svh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.166951 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e71490b-81c7-4448-ae87-bbed0110efcd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8e71490b-81c7-4448-ae87-bbed0110efcd" (UID: "8e71490b-81c7-4448-ae87-bbed0110efcd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.168604 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6svh\" (UniqueName: \"kubernetes.io/projected/8e71490b-81c7-4448-ae87-bbed0110efcd-kube-api-access-r6svh\") on node \"crc\" DevicePath \"\"" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.168661 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e71490b-81c7-4448-ae87-bbed0110efcd-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.168681 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e71490b-81c7-4448-ae87-bbed0110efcd-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.291454 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2def1300-5b93-411b-b6ef-da3b1365e726" path="/var/lib/kubelet/pods/2def1300-5b93-411b-b6ef-da3b1365e726/volumes" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.462224 4747 generic.go:334] "Generic (PLEG): container finished" podID="8e71490b-81c7-4448-ae87-bbed0110efcd" containerID="519cd866ccfe8c23c31e9dc2058653c9380fee11699f5a5326af88cdf382c5a0" exitCode=0 Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.462314 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7z6gv" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.462315 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7z6gv" event={"ID":"8e71490b-81c7-4448-ae87-bbed0110efcd","Type":"ContainerDied","Data":"519cd866ccfe8c23c31e9dc2058653c9380fee11699f5a5326af88cdf382c5a0"} Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.462768 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7z6gv" event={"ID":"8e71490b-81c7-4448-ae87-bbed0110efcd","Type":"ContainerDied","Data":"bdea363a949a2f4c964e25e783c239357a5b8cb9ef861efaa80ca448fab98687"} Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.462789 4747 scope.go:117] "RemoveContainer" containerID="519cd866ccfe8c23c31e9dc2058653c9380fee11699f5a5326af88cdf382c5a0" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.485340 4747 scope.go:117] "RemoveContainer" containerID="f8e104281eb97c12643a867b34e97b1ccec87deff7139c44ad11d54ad527b762" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.501948 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7z6gv"] Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.506886 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7z6gv"] Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.507500 4747 scope.go:117] "RemoveContainer" containerID="cbac271c9744ec1bd0059472d0a532ae1652bcea501dfd9df78eece825c67fb7" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.550366 4747 scope.go:117] "RemoveContainer" containerID="519cd866ccfe8c23c31e9dc2058653c9380fee11699f5a5326af88cdf382c5a0" Oct 01 06:30:35 crc kubenswrapper[4747]: E1001 06:30:35.550903 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"519cd866ccfe8c23c31e9dc2058653c9380fee11699f5a5326af88cdf382c5a0\": container with ID starting with 519cd866ccfe8c23c31e9dc2058653c9380fee11699f5a5326af88cdf382c5a0 not found: ID does not exist" containerID="519cd866ccfe8c23c31e9dc2058653c9380fee11699f5a5326af88cdf382c5a0" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.550947 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"519cd866ccfe8c23c31e9dc2058653c9380fee11699f5a5326af88cdf382c5a0"} err="failed to get container status \"519cd866ccfe8c23c31e9dc2058653c9380fee11699f5a5326af88cdf382c5a0\": rpc error: code = NotFound desc = could not find container \"519cd866ccfe8c23c31e9dc2058653c9380fee11699f5a5326af88cdf382c5a0\": container with ID starting with 519cd866ccfe8c23c31e9dc2058653c9380fee11699f5a5326af88cdf382c5a0 not found: ID does not exist" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.550974 4747 scope.go:117] "RemoveContainer" containerID="f8e104281eb97c12643a867b34e97b1ccec87deff7139c44ad11d54ad527b762" Oct 01 06:30:35 crc kubenswrapper[4747]: E1001 06:30:35.551347 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8e104281eb97c12643a867b34e97b1ccec87deff7139c44ad11d54ad527b762\": container with ID starting with f8e104281eb97c12643a867b34e97b1ccec87deff7139c44ad11d54ad527b762 not found: ID does not exist" containerID="f8e104281eb97c12643a867b34e97b1ccec87deff7139c44ad11d54ad527b762" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.551406 4747 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8e104281eb97c12643a867b34e97b1ccec87deff7139c44ad11d54ad527b762"} err="failed to get container status \"f8e104281eb97c12643a867b34e97b1ccec87deff7139c44ad11d54ad527b762\": rpc error: code = NotFound desc = could not find container \"f8e104281eb97c12643a867b34e97b1ccec87deff7139c44ad11d54ad527b762\": container with ID starting with f8e104281eb97c12643a867b34e97b1ccec87deff7139c44ad11d54ad527b762 not found: ID does not exist" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.551449 4747 scope.go:117] "RemoveContainer" containerID="cbac271c9744ec1bd0059472d0a532ae1652bcea501dfd9df78eece825c67fb7" Oct 01 06:30:35 crc kubenswrapper[4747]: E1001 06:30:35.551831 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbac271c9744ec1bd0059472d0a532ae1652bcea501dfd9df78eece825c67fb7\": container with ID starting with cbac271c9744ec1bd0059472d0a532ae1652bcea501dfd9df78eece825c67fb7 not found: ID does not exist" containerID="cbac271c9744ec1bd0059472d0a532ae1652bcea501dfd9df78eece825c67fb7" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.551880 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbac271c9744ec1bd0059472d0a532ae1652bcea501dfd9df78eece825c67fb7"} err="failed to get container status \"cbac271c9744ec1bd0059472d0a532ae1652bcea501dfd9df78eece825c67fb7\": rpc error: code = NotFound desc = could not find container \"cbac271c9744ec1bd0059472d0a532ae1652bcea501dfd9df78eece825c67fb7\": container with ID starting with cbac271c9744ec1bd0059472d0a532ae1652bcea501dfd9df78eece825c67fb7 not found: ID does not exist" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.745054 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/memcached-0" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.760884 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.760962 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.761021 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.761841 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"779a7206e770a3d3eff5fabc4a08045c36917bf566f468ea4269d33bee1db67f"} pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 06:30:35 crc kubenswrapper[4747]: I1001 06:30:35.761942 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" 
podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" containerID="cri-o://779a7206e770a3d3eff5fabc4a08045c36917bf566f468ea4269d33bee1db67f" gracePeriod=600 Oct 01 06:30:36 crc kubenswrapper[4747]: I1001 06:30:36.472844 4747 generic.go:334] "Generic (PLEG): container finished" podID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerID="779a7206e770a3d3eff5fabc4a08045c36917bf566f468ea4269d33bee1db67f" exitCode=0 Oct 01 06:30:36 crc kubenswrapper[4747]: I1001 06:30:36.473151 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" event={"ID":"90df9e29-7482-4ab7-84c6-f3029df17a0d","Type":"ContainerDied","Data":"779a7206e770a3d3eff5fabc4a08045c36917bf566f468ea4269d33bee1db67f"} Oct 01 06:30:36 crc kubenswrapper[4747]: I1001 06:30:36.473183 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" event={"ID":"90df9e29-7482-4ab7-84c6-f3029df17a0d","Type":"ContainerStarted","Data":"c7fef4e888b7d3576c874bccbc790853fe54a02c39edee04d55581bad2028dc9"} Oct 01 06:30:36 crc kubenswrapper[4747]: I1001 06:30:36.473204 4747 scope.go:117] "RemoveContainer" containerID="fb86108ac3eb45a3fbb1aa165dbc43fde8305d3de0de6e143c3c6aadae17a056" Oct 01 06:30:37 crc kubenswrapper[4747]: I1001 06:30:37.295828 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e71490b-81c7-4448-ae87-bbed0110efcd" path="/var/lib/kubelet/pods/8e71490b-81c7-4448-ae87-bbed0110efcd/volumes" Oct 01 06:30:37 crc kubenswrapper[4747]: I1001 06:30:37.314910 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:37 crc kubenswrapper[4747]: I1001 06:30:37.387505 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/openstack-galera-2" Oct 01 06:30:41 crc kubenswrapper[4747]: I1001 06:30:41.225229 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/rabbitmq-cluster-operator-index-czqvp" Oct 01 06:30:41 crc kubenswrapper[4747]: I1001 06:30:41.227963 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/rabbitmq-cluster-operator-index-czqvp" Oct 01 06:30:41 crc kubenswrapper[4747]: I1001 06:30:41.263722 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/rabbitmq-cluster-operator-index-czqvp" Oct 01 06:30:41 crc kubenswrapper[4747]: I1001 06:30:41.547484 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/rabbitmq-cluster-operator-index-czqvp" Oct 01 06:30:43 crc kubenswrapper[4747]: I1001 06:30:43.280991 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="glance-kuttl-tests/openstack-galera-2" podUID="7fe827ea-ce04-449a-8a2c-5a99a3d76343" containerName="galera" probeResult="failure" output=< Oct 01 06:30:43 crc kubenswrapper[4747]: wsrep_local_state_comment (Donor/Desynced) differs from Synced Oct 01 06:30:43 crc kubenswrapper[4747]: > Oct 01 06:30:44 crc kubenswrapper[4747]: I1001 06:30:44.764453 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25"] Oct 01 06:30:44 crc kubenswrapper[4747]: E1001 06:30:44.764839 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2def1300-5b93-411b-b6ef-da3b1365e726" containerName="registry-server" Oct 01 06:30:44 crc 
kubenswrapper[4747]: I1001 06:30:44.764860 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2def1300-5b93-411b-b6ef-da3b1365e726" containerName="registry-server" Oct 01 06:30:44 crc kubenswrapper[4747]: E1001 06:30:44.764886 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e71490b-81c7-4448-ae87-bbed0110efcd" containerName="extract-utilities" Oct 01 06:30:44 crc kubenswrapper[4747]: I1001 06:30:44.764897 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e71490b-81c7-4448-ae87-bbed0110efcd" containerName="extract-utilities" Oct 01 06:30:44 crc kubenswrapper[4747]: E1001 06:30:44.764917 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e71490b-81c7-4448-ae87-bbed0110efcd" containerName="extract-content" Oct 01 06:30:44 crc kubenswrapper[4747]: I1001 06:30:44.764928 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e71490b-81c7-4448-ae87-bbed0110efcd" containerName="extract-content" Oct 01 06:30:44 crc kubenswrapper[4747]: E1001 06:30:44.764949 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e71490b-81c7-4448-ae87-bbed0110efcd" containerName="registry-server" Oct 01 06:30:44 crc kubenswrapper[4747]: I1001 06:30:44.764960 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e71490b-81c7-4448-ae87-bbed0110efcd" containerName="registry-server" Oct 01 06:30:44 crc kubenswrapper[4747]: I1001 06:30:44.765146 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e71490b-81c7-4448-ae87-bbed0110efcd" containerName="registry-server" Oct 01 06:30:44 crc kubenswrapper[4747]: I1001 06:30:44.765178 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="2def1300-5b93-411b-b6ef-da3b1365e726" containerName="registry-server" Oct 01 06:30:44 crc kubenswrapper[4747]: I1001 06:30:44.766472 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25" Oct 01 06:30:44 crc kubenswrapper[4747]: I1001 06:30:44.773373 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-b9vtl" Oct 01 06:30:44 crc kubenswrapper[4747]: I1001 06:30:44.787827 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25"] Oct 01 06:30:44 crc kubenswrapper[4747]: I1001 06:30:44.932157 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vf67\" (UniqueName: \"kubernetes.io/projected/f13c90e4-af2a-4615-a726-3f5f36ad445e-kube-api-access-8vf67\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25\" (UID: \"f13c90e4-af2a-4615-a726-3f5f36ad445e\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25" Oct 01 06:30:44 crc kubenswrapper[4747]: I1001 06:30:44.932677 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f13c90e4-af2a-4615-a726-3f5f36ad445e-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25\" (UID: \"f13c90e4-af2a-4615-a726-3f5f36ad445e\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25" Oct 01 06:30:44 crc kubenswrapper[4747]: I1001 06:30:44.932780 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f13c90e4-af2a-4615-a726-3f5f36ad445e-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25\" (UID: \"f13c90e4-af2a-4615-a726-3f5f36ad445e\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25" Oct 01 06:30:45 crc kubenswrapper[4747]: I1001 06:30:45.034640 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f13c90e4-af2a-4615-a726-3f5f36ad445e-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25\" (UID: \"f13c90e4-af2a-4615-a726-3f5f36ad445e\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25" Oct 01 06:30:45 crc kubenswrapper[4747]: I1001 06:30:45.034733 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vf67\" (UniqueName: \"kubernetes.io/projected/f13c90e4-af2a-4615-a726-3f5f36ad445e-kube-api-access-8vf67\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25\" (UID: \"f13c90e4-af2a-4615-a726-3f5f36ad445e\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25" Oct 01 06:30:45 crc kubenswrapper[4747]: I1001 06:30:45.034853 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f13c90e4-af2a-4615-a726-3f5f36ad445e-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25\" (UID: \"f13c90e4-af2a-4615-a726-3f5f36ad445e\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25" Oct 01 06:30:45 crc kubenswrapper[4747]: I1001 06:30:45.035577 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/f13c90e4-af2a-4615-a726-3f5f36ad445e-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25\" (UID: \"f13c90e4-af2a-4615-a726-3f5f36ad445e\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25" Oct 01 06:30:45 crc kubenswrapper[4747]: I1001 06:30:45.035734 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f13c90e4-af2a-4615-a726-3f5f36ad445e-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25\" (UID: \"f13c90e4-af2a-4615-a726-3f5f36ad445e\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25" Oct 01 06:30:45 crc kubenswrapper[4747]: I1001 06:30:45.054258 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vf67\" (UniqueName: \"kubernetes.io/projected/f13c90e4-af2a-4615-a726-3f5f36ad445e-kube-api-access-8vf67\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25\" (UID: \"f13c90e4-af2a-4615-a726-3f5f36ad445e\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25" Oct 01 06:30:45 crc kubenswrapper[4747]: I1001 06:30:45.088346 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25" Oct 01 06:30:45 crc kubenswrapper[4747]: I1001 06:30:45.569585 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25"] Oct 01 06:30:45 crc kubenswrapper[4747]: W1001 06:30:45.577449 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf13c90e4_af2a_4615_a726_3f5f36ad445e.slice/crio-328438a046df6fd357398fbbe49c387d6e54975495d538a5ac41bc8f5523ac7b WatchSource:0}: Error finding container 328438a046df6fd357398fbbe49c387d6e54975495d538a5ac41bc8f5523ac7b: Status 404 returned error can't find the container with id 328438a046df6fd357398fbbe49c387d6e54975495d538a5ac41bc8f5523ac7b Oct 01 06:30:46 crc kubenswrapper[4747]: I1001 06:30:46.559888 4747 generic.go:334] "Generic (PLEG): container finished" podID="f13c90e4-af2a-4615-a726-3f5f36ad445e" containerID="3c8f958d353881ab2cbaeba51832ee26d4b024fc86b84c990c20ddfbfc53dd37" exitCode=0 Oct 01 06:30:46 crc kubenswrapper[4747]: I1001 06:30:46.559960 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25" event={"ID":"f13c90e4-af2a-4615-a726-3f5f36ad445e","Type":"ContainerDied","Data":"3c8f958d353881ab2cbaeba51832ee26d4b024fc86b84c990c20ddfbfc53dd37"} Oct 01 06:30:46 crc kubenswrapper[4747]: I1001 06:30:46.560131 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25" event={"ID":"f13c90e4-af2a-4615-a726-3f5f36ad445e","Type":"ContainerStarted","Data":"328438a046df6fd357398fbbe49c387d6e54975495d538a5ac41bc8f5523ac7b"} Oct 01 06:30:47 crc kubenswrapper[4747]: I1001 06:30:47.165348 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:47 crc kubenswrapper[4747]: I1001 06:30:47.219228 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/openstack-galera-1" Oct 01 06:30:47 crc 
kubenswrapper[4747]: I1001 06:30:47.580346 4747 generic.go:334] "Generic (PLEG): container finished" podID="f13c90e4-af2a-4615-a726-3f5f36ad445e" containerID="f256a90737a81f953b6a71c314b9a3aed6a84054c2a8596da7f123f3e295bace" exitCode=0 Oct 01 06:30:47 crc kubenswrapper[4747]: I1001 06:30:47.581202 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25" event={"ID":"f13c90e4-af2a-4615-a726-3f5f36ad445e","Type":"ContainerDied","Data":"f256a90737a81f953b6a71c314b9a3aed6a84054c2a8596da7f123f3e295bace"} Oct 01 06:30:48 crc kubenswrapper[4747]: I1001 06:30:48.593795 4747 generic.go:334] "Generic (PLEG): container finished" podID="f13c90e4-af2a-4615-a726-3f5f36ad445e" containerID="0d51c6cd8d799ea297aa6ca47778e8aface7706cc32c8e29b6f6bc7591a4816a" exitCode=0 Oct 01 06:30:48 crc kubenswrapper[4747]: I1001 06:30:48.593928 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25" event={"ID":"f13c90e4-af2a-4615-a726-3f5f36ad445e","Type":"ContainerDied","Data":"0d51c6cd8d799ea297aa6ca47778e8aface7706cc32c8e29b6f6bc7591a4816a"} Oct 01 06:30:49 crc kubenswrapper[4747]: I1001 06:30:49.910544 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25" Oct 01 06:30:50 crc kubenswrapper[4747]: I1001 06:30:50.003954 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f13c90e4-af2a-4615-a726-3f5f36ad445e-util\") pod \"f13c90e4-af2a-4615-a726-3f5f36ad445e\" (UID: \"f13c90e4-af2a-4615-a726-3f5f36ad445e\") " Oct 01 06:30:50 crc kubenswrapper[4747]: I1001 06:30:50.004021 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f13c90e4-af2a-4615-a726-3f5f36ad445e-bundle\") pod \"f13c90e4-af2a-4615-a726-3f5f36ad445e\" (UID: \"f13c90e4-af2a-4615-a726-3f5f36ad445e\") " Oct 01 06:30:50 crc kubenswrapper[4747]: I1001 06:30:50.004138 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8vf67\" (UniqueName: \"kubernetes.io/projected/f13c90e4-af2a-4615-a726-3f5f36ad445e-kube-api-access-8vf67\") pod \"f13c90e4-af2a-4615-a726-3f5f36ad445e\" (UID: \"f13c90e4-af2a-4615-a726-3f5f36ad445e\") " Oct 01 06:30:50 crc kubenswrapper[4747]: I1001 06:30:50.005011 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f13c90e4-af2a-4615-a726-3f5f36ad445e-bundle" (OuterVolumeSpecName: "bundle") pod "f13c90e4-af2a-4615-a726-3f5f36ad445e" (UID: "f13c90e4-af2a-4615-a726-3f5f36ad445e"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:30:50 crc kubenswrapper[4747]: I1001 06:30:50.012113 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f13c90e4-af2a-4615-a726-3f5f36ad445e-kube-api-access-8vf67" (OuterVolumeSpecName: "kube-api-access-8vf67") pod "f13c90e4-af2a-4615-a726-3f5f36ad445e" (UID: "f13c90e4-af2a-4615-a726-3f5f36ad445e"). InnerVolumeSpecName "kube-api-access-8vf67". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:30:50 crc kubenswrapper[4747]: I1001 06:30:50.018602 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f13c90e4-af2a-4615-a726-3f5f36ad445e-util" (OuterVolumeSpecName: "util") pod "f13c90e4-af2a-4615-a726-3f5f36ad445e" (UID: "f13c90e4-af2a-4615-a726-3f5f36ad445e"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:30:50 crc kubenswrapper[4747]: I1001 06:30:50.105234 4747 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f13c90e4-af2a-4615-a726-3f5f36ad445e-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:30:50 crc kubenswrapper[4747]: I1001 06:30:50.105264 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8vf67\" (UniqueName: \"kubernetes.io/projected/f13c90e4-af2a-4615-a726-3f5f36ad445e-kube-api-access-8vf67\") on node \"crc\" DevicePath \"\"" Oct 01 06:30:50 crc kubenswrapper[4747]: I1001 06:30:50.105274 4747 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f13c90e4-af2a-4615-a726-3f5f36ad445e-util\") on node \"crc\" DevicePath \"\"" Oct 01 06:30:50 crc kubenswrapper[4747]: I1001 06:30:50.613849 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25" event={"ID":"f13c90e4-af2a-4615-a726-3f5f36ad445e","Type":"ContainerDied","Data":"328438a046df6fd357398fbbe49c387d6e54975495d538a5ac41bc8f5523ac7b"} Oct 01 06:30:50 crc kubenswrapper[4747]: I1001 06:30:50.614254 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="328438a046df6fd357398fbbe49c387d6e54975495d538a5ac41bc8f5523ac7b" Oct 01 06:30:50 crc kubenswrapper[4747]: I1001 06:30:50.614356 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25" Oct 01 06:30:51 crc kubenswrapper[4747]: I1001 06:30:51.760606 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:51 crc kubenswrapper[4747]: I1001 06:30:51.825035 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/openstack-galera-0" Oct 01 06:30:59 crc kubenswrapper[4747]: I1001 06:30:59.804145 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-78kw7"] Oct 01 06:30:59 crc kubenswrapper[4747]: E1001 06:30:59.804819 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f13c90e4-af2a-4615-a726-3f5f36ad445e" containerName="extract" Oct 01 06:30:59 crc kubenswrapper[4747]: I1001 06:30:59.804834 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f13c90e4-af2a-4615-a726-3f5f36ad445e" containerName="extract" Oct 01 06:30:59 crc kubenswrapper[4747]: E1001 06:30:59.804853 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f13c90e4-af2a-4615-a726-3f5f36ad445e" containerName="pull" Oct 01 06:30:59 crc kubenswrapper[4747]: I1001 06:30:59.804861 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f13c90e4-af2a-4615-a726-3f5f36ad445e" containerName="pull" Oct 01 06:30:59 crc kubenswrapper[4747]: E1001 06:30:59.804875 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f13c90e4-af2a-4615-a726-3f5f36ad445e" containerName="util" Oct 01 06:30:59 crc kubenswrapper[4747]: I1001 06:30:59.804883 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f13c90e4-af2a-4615-a726-3f5f36ad445e" containerName="util" Oct 01 06:30:59 crc kubenswrapper[4747]: I1001 06:30:59.805012 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f13c90e4-af2a-4615-a726-3f5f36ad445e" containerName="extract" Oct 01 06:30:59 crc kubenswrapper[4747]: I1001 06:30:59.805464 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-78kw7" Oct 01 06:30:59 crc kubenswrapper[4747]: I1001 06:30:59.808624 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-dockercfg-hlmpc" Oct 01 06:30:59 crc kubenswrapper[4747]: I1001 06:30:59.831342 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-78kw7"] Oct 01 06:30:59 crc kubenswrapper[4747]: I1001 06:30:59.848657 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6hwx\" (UniqueName: \"kubernetes.io/projected/c2a1092f-de97-4fa0-9922-3cbcb000f041-kube-api-access-z6hwx\") pod \"rabbitmq-cluster-operator-779fc9694b-78kw7\" (UID: \"c2a1092f-de97-4fa0-9922-3cbcb000f041\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-78kw7" Oct 01 06:30:59 crc kubenswrapper[4747]: I1001 06:30:59.950808 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6hwx\" (UniqueName: \"kubernetes.io/projected/c2a1092f-de97-4fa0-9922-3cbcb000f041-kube-api-access-z6hwx\") pod \"rabbitmq-cluster-operator-779fc9694b-78kw7\" (UID: \"c2a1092f-de97-4fa0-9922-3cbcb000f041\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-78kw7" Oct 01 06:30:59 crc kubenswrapper[4747]: I1001 06:30:59.967656 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6hwx\" (UniqueName: \"kubernetes.io/projected/c2a1092f-de97-4fa0-9922-3cbcb000f041-kube-api-access-z6hwx\") pod \"rabbitmq-cluster-operator-779fc9694b-78kw7\" (UID: \"c2a1092f-de97-4fa0-9922-3cbcb000f041\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-78kw7" Oct 01 06:31:00 crc kubenswrapper[4747]: I1001 06:31:00.178284 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-78kw7" Oct 01 06:31:00 crc kubenswrapper[4747]: I1001 06:31:00.656673 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-78kw7"] Oct 01 06:31:00 crc kubenswrapper[4747]: I1001 06:31:00.692227 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-78kw7" event={"ID":"c2a1092f-de97-4fa0-9922-3cbcb000f041","Type":"ContainerStarted","Data":"50718bbb0170f9a06b0a262642864d380a9473bdb954df580a3dbc0a63c8230a"} Oct 01 06:31:02 crc kubenswrapper[4747]: I1001 06:31:02.710005 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-78kw7" event={"ID":"c2a1092f-de97-4fa0-9922-3cbcb000f041","Type":"ContainerStarted","Data":"50b6e5b0d245f6107f81112480272a93123a110ac8477ff7292ce87dc66515b5"} Oct 01 06:31:02 crc kubenswrapper[4747]: I1001 06:31:02.736617 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-78kw7" podStartSLOduration=1.9240716679999998 podStartE2EDuration="3.736589779s" podCreationTimestamp="2025-10-01 06:30:59 +0000 UTC" firstStartedPulling="2025-10-01 06:31:00.66563478 +0000 UTC m=+862.075291829" lastFinishedPulling="2025-10-01 06:31:02.478152891 +0000 UTC m=+863.887809940" observedRunningTime="2025-10-01 06:31:02.729455537 +0000 UTC m=+864.139112606" watchObservedRunningTime="2025-10-01 06:31:02.736589779 +0000 UTC m=+864.146246838" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.396376 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/rabbitmq-server-0"] Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.398305 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.400554 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"rabbitmq-default-user" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.402192 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"rabbitmq-erlang-cookie" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.402555 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"rabbitmq-server-conf" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.402193 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"rabbitmq-plugins-conf" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.404502 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"rabbitmq-server-dockercfg-4sbzm" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.451874 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/265c0df4-e327-42c7-bd89-f88ad59209ec-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.451961 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcxcq\" (UniqueName: \"kubernetes.io/projected/265c0df4-e327-42c7-bd89-f88ad59209ec-kube-api-access-fcxcq\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.452164 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/265c0df4-e327-42c7-bd89-f88ad59209ec-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.452250 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/265c0df4-e327-42c7-bd89-f88ad59209ec-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.452323 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/265c0df4-e327-42c7-bd89-f88ad59209ec-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.452462 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/265c0df4-e327-42c7-bd89-f88ad59209ec-pod-info\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.452486 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" 
(UniqueName: \"kubernetes.io/configmap/265c0df4-e327-42c7-bd89-f88ad59209ec-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.452588 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5995f6a8-2630-4706-a805-c7b56e3ad646\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5995f6a8-2630-4706-a805-c7b56e3ad646\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.471293 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/rabbitmq-server-0"] Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.554225 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/265c0df4-e327-42c7-bd89-f88ad59209ec-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.554395 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/265c0df4-e327-42c7-bd89-f88ad59209ec-pod-info\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.554434 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/265c0df4-e327-42c7-bd89-f88ad59209ec-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.554480 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5995f6a8-2630-4706-a805-c7b56e3ad646\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5995f6a8-2630-4706-a805-c7b56e3ad646\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.554552 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/265c0df4-e327-42c7-bd89-f88ad59209ec-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.554610 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcxcq\" (UniqueName: \"kubernetes.io/projected/265c0df4-e327-42c7-bd89-f88ad59209ec-kube-api-access-fcxcq\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.554662 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/265c0df4-e327-42c7-bd89-f88ad59209ec-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: 
I1001 06:31:07.554710 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/265c0df4-e327-42c7-bd89-f88ad59209ec-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.555293 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/265c0df4-e327-42c7-bd89-f88ad59209ec-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.555308 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/265c0df4-e327-42c7-bd89-f88ad59209ec-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.555574 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/265c0df4-e327-42c7-bd89-f88ad59209ec-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.562729 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/265c0df4-e327-42c7-bd89-f88ad59209ec-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.567402 4747 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
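The reconciler entries around this point walk each declared volume of rabbitmq-server-0 through VerifyControllerAttachedVolume, MountVolume and MountVolume.SetUp; the UniqueName values generally encode the volume plugin, the pod UID and the volume name (the CSI volume instead carries the driver name and volume handle, kubevirt.io.hostpath-provisioner^pvc-5995f6a8-...). The following is a minimal client-go sketch, illustrative only and not part of the captured kubelet output, assuming a reachable kubeconfig for this cluster; it lists the same volumes straight from the pod spec so they can be cross-checked against the mount entries above and below.

package main

import (
	"context"
	"fmt"
	"os"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumption (not from the log): credentials come from $KUBECONFIG.
	cfg, err := clientcmd.BuildConfigFromFlags("", os.Getenv("KUBECONFIG"))
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	pod, err := cs.CoreV1().Pods("glance-kuttl-tests").Get(context.TODO(), "rabbitmq-server-0", metav1.GetOptions{})
	if err != nil {
		panic(err)
	}
	// Print each declared volume and its source type, mirroring the UniqueName strings in the log.
	for _, v := range pod.Spec.Volumes {
		switch {
		case v.EmptyDir != nil:
			fmt.Printf("%s: empty-dir\n", v.Name)
		case v.Secret != nil:
			fmt.Printf("%s: secret/%s\n", v.Name, v.Secret.SecretName)
		case v.ConfigMap != nil:
			fmt.Printf("%s: configmap/%s\n", v.Name, v.ConfigMap.Name)
		case v.Projected != nil:
			fmt.Printf("%s: projected\n", v.Name)
		case v.DownwardAPI != nil:
			fmt.Printf("%s: downward-api\n", v.Name)
		case v.PersistentVolumeClaim != nil:
			fmt.Printf("%s: pvc/%s\n", v.Name, v.PersistentVolumeClaim.ClaimName)
		default:
			fmt.Printf("%s: other\n", v.Name)
		}
	}
}

The pod_startup_latency_tracker entries in this log (for example the one above for openstack-operators/rabbitmq-cluster-operator-779fc9694b-78kw7) report two durations whose values are consistent with reading podStartE2EDuration as watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration as that figure minus the image-pull window (lastFinishedPulling minus firstStartedPulling). The short Go check below reproduces the reported numbers from the timestamps copied out of that entry; it is a sketch of that reading, not the kubelet's own code.

package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout matching the timestamp format used in the tracker fields.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	created := parse("2025-10-01 06:30:59 +0000 UTC")            // podCreationTimestamp
	firstPull := parse("2025-10-01 06:31:00.66563478 +0000 UTC") // firstStartedPulling
	lastPull := parse("2025-10-01 06:31:02.478152891 +0000 UTC") // lastFinishedPulling
	running := parse("2025-10-01 06:31:02.736589779 +0000 UTC")  // watchObservedRunningTime

	e2e := running.Sub(created)          // 3.736589779s, matches podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // 1.924071668s, consistent with podStartSLOduration=1.9240716679999998
	fmt.Println(e2e, slo)
}
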
Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.567506 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5995f6a8-2630-4706-a805-c7b56e3ad646\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5995f6a8-2630-4706-a805-c7b56e3ad646\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/af8b19fb9f627e87db02a1966a79380812dfd66c6e1f6d9c9fafa7f6f5267848/globalmount\"" pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.577213 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/265c0df4-e327-42c7-bd89-f88ad59209ec-pod-info\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.577813 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcxcq\" (UniqueName: \"kubernetes.io/projected/265c0df4-e327-42c7-bd89-f88ad59209ec-kube-api-access-fcxcq\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.594178 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/265c0df4-e327-42c7-bd89-f88ad59209ec-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.624543 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5995f6a8-2630-4706-a805-c7b56e3ad646\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5995f6a8-2630-4706-a805-c7b56e3ad646\") pod \"rabbitmq-server-0\" (UID: \"265c0df4-e327-42c7-bd89-f88ad59209ec\") " pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:07 crc kubenswrapper[4747]: I1001 06:31:07.735193 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:08 crc kubenswrapper[4747]: I1001 06:31:08.175933 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/rabbitmq-server-0"] Oct 01 06:31:08 crc kubenswrapper[4747]: W1001 06:31:08.183669 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod265c0df4_e327_42c7_bd89_f88ad59209ec.slice/crio-b5ae6b18ee705ce0be4bcbe6300173952ccbc849efcee9d707d88c30978e9268 WatchSource:0}: Error finding container b5ae6b18ee705ce0be4bcbe6300173952ccbc849efcee9d707d88c30978e9268: Status 404 returned error can't find the container with id b5ae6b18ee705ce0be4bcbe6300173952ccbc849efcee9d707d88c30978e9268 Oct 01 06:31:08 crc kubenswrapper[4747]: I1001 06:31:08.754154 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/rabbitmq-server-0" event={"ID":"265c0df4-e327-42c7-bd89-f88ad59209ec","Type":"ContainerStarted","Data":"b5ae6b18ee705ce0be4bcbe6300173952ccbc849efcee9d707d88c30978e9268"} Oct 01 06:31:09 crc kubenswrapper[4747]: I1001 06:31:09.115713 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-index-fsdwk"] Oct 01 06:31:09 crc kubenswrapper[4747]: I1001 06:31:09.116531 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-fsdwk" Oct 01 06:31:09 crc kubenswrapper[4747]: I1001 06:31:09.119816 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-index-dockercfg-2l47q" Oct 01 06:31:09 crc kubenswrapper[4747]: I1001 06:31:09.123606 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-fsdwk"] Oct 01 06:31:09 crc kubenswrapper[4747]: I1001 06:31:09.176455 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zdfb\" (UniqueName: \"kubernetes.io/projected/4c8e3fbf-453f-430b-9784-58a85509378b-kube-api-access-8zdfb\") pod \"keystone-operator-index-fsdwk\" (UID: \"4c8e3fbf-453f-430b-9784-58a85509378b\") " pod="openstack-operators/keystone-operator-index-fsdwk" Oct 01 06:31:09 crc kubenswrapper[4747]: I1001 06:31:09.281834 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zdfb\" (UniqueName: \"kubernetes.io/projected/4c8e3fbf-453f-430b-9784-58a85509378b-kube-api-access-8zdfb\") pod \"keystone-operator-index-fsdwk\" (UID: \"4c8e3fbf-453f-430b-9784-58a85509378b\") " pod="openstack-operators/keystone-operator-index-fsdwk" Oct 01 06:31:09 crc kubenswrapper[4747]: I1001 06:31:09.301145 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zdfb\" (UniqueName: \"kubernetes.io/projected/4c8e3fbf-453f-430b-9784-58a85509378b-kube-api-access-8zdfb\") pod \"keystone-operator-index-fsdwk\" (UID: \"4c8e3fbf-453f-430b-9784-58a85509378b\") " pod="openstack-operators/keystone-operator-index-fsdwk" Oct 01 06:31:09 crc kubenswrapper[4747]: I1001 06:31:09.437407 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-fsdwk" Oct 01 06:31:09 crc kubenswrapper[4747]: I1001 06:31:09.823435 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-fsdwk"] Oct 01 06:31:11 crc kubenswrapper[4747]: I1001 06:31:11.775718 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-fsdwk" event={"ID":"4c8e3fbf-453f-430b-9784-58a85509378b","Type":"ContainerStarted","Data":"61730ff4b1d39e28f3602768416ff8321fda769b473a77de43d0a6d8abe385c9"} Oct 01 06:31:13 crc kubenswrapper[4747]: I1001 06:31:13.298732 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-fsdwk"] Oct 01 06:31:13 crc kubenswrapper[4747]: I1001 06:31:13.911009 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-index-nrlsf"] Oct 01 06:31:13 crc kubenswrapper[4747]: I1001 06:31:13.913129 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-nrlsf" Oct 01 06:31:13 crc kubenswrapper[4747]: I1001 06:31:13.924662 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-nrlsf"] Oct 01 06:31:13 crc kubenswrapper[4747]: I1001 06:31:13.953451 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z28z\" (UniqueName: \"kubernetes.io/projected/67b6bc26-65f0-403d-be25-31ff044af9a1-kube-api-access-7z28z\") pod \"keystone-operator-index-nrlsf\" (UID: \"67b6bc26-65f0-403d-be25-31ff044af9a1\") " pod="openstack-operators/keystone-operator-index-nrlsf" Oct 01 06:31:14 crc kubenswrapper[4747]: I1001 06:31:14.054867 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z28z\" (UniqueName: \"kubernetes.io/projected/67b6bc26-65f0-403d-be25-31ff044af9a1-kube-api-access-7z28z\") pod \"keystone-operator-index-nrlsf\" (UID: \"67b6bc26-65f0-403d-be25-31ff044af9a1\") " pod="openstack-operators/keystone-operator-index-nrlsf" Oct 01 06:31:14 crc kubenswrapper[4747]: I1001 06:31:14.080460 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z28z\" (UniqueName: \"kubernetes.io/projected/67b6bc26-65f0-403d-be25-31ff044af9a1-kube-api-access-7z28z\") pod \"keystone-operator-index-nrlsf\" (UID: \"67b6bc26-65f0-403d-be25-31ff044af9a1\") " pod="openstack-operators/keystone-operator-index-nrlsf" Oct 01 06:31:14 crc kubenswrapper[4747]: I1001 06:31:14.267641 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-nrlsf" Oct 01 06:31:14 crc kubenswrapper[4747]: I1001 06:31:14.654432 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-nrlsf"] Oct 01 06:31:14 crc kubenswrapper[4747]: W1001 06:31:14.657660 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod67b6bc26_65f0_403d_be25_31ff044af9a1.slice/crio-b09f318142b9cd260de241d8c8b9960be007a0e9f49487d34910639b15d1f0cc WatchSource:0}: Error finding container b09f318142b9cd260de241d8c8b9960be007a0e9f49487d34910639b15d1f0cc: Status 404 returned error can't find the container with id b09f318142b9cd260de241d8c8b9960be007a0e9f49487d34910639b15d1f0cc Oct 01 06:31:14 crc kubenswrapper[4747]: I1001 06:31:14.802646 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-nrlsf" event={"ID":"67b6bc26-65f0-403d-be25-31ff044af9a1","Type":"ContainerStarted","Data":"b09f318142b9cd260de241d8c8b9960be007a0e9f49487d34910639b15d1f0cc"} Oct 01 06:31:14 crc kubenswrapper[4747]: I1001 06:31:14.804654 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-fsdwk" event={"ID":"4c8e3fbf-453f-430b-9784-58a85509378b","Type":"ContainerStarted","Data":"ea748b242bd359346066ed7cb9fa01cd70d4191ad1b7291d04d34e19b3c364e8"} Oct 01 06:31:14 crc kubenswrapper[4747]: I1001 06:31:14.804847 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-index-fsdwk" podUID="4c8e3fbf-453f-430b-9784-58a85509378b" containerName="registry-server" containerID="cri-o://ea748b242bd359346066ed7cb9fa01cd70d4191ad1b7291d04d34e19b3c364e8" gracePeriod=2 Oct 01 06:31:14 crc kubenswrapper[4747]: I1001 06:31:14.807406 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/rabbitmq-server-0" event={"ID":"265c0df4-e327-42c7-bd89-f88ad59209ec","Type":"ContainerStarted","Data":"6ff56a43b18ae3a8bbc06411062e0b0362fda920d5ce726259440baa11210513"} Oct 01 06:31:14 crc kubenswrapper[4747]: I1001 06:31:14.829185 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-index-fsdwk" podStartSLOduration=3.112121464 podStartE2EDuration="5.829166643s" podCreationTimestamp="2025-10-01 06:31:09 +0000 UTC" firstStartedPulling="2025-10-01 06:31:11.481866659 +0000 UTC m=+872.891523718" lastFinishedPulling="2025-10-01 06:31:14.198911848 +0000 UTC m=+875.608568897" observedRunningTime="2025-10-01 06:31:14.825511839 +0000 UTC m=+876.235168918" watchObservedRunningTime="2025-10-01 06:31:14.829166643 +0000 UTC m=+876.238823702" Oct 01 06:31:15 crc kubenswrapper[4747]: I1001 06:31:15.278923 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-fsdwk" Oct 01 06:31:15 crc kubenswrapper[4747]: I1001 06:31:15.368999 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8zdfb\" (UniqueName: \"kubernetes.io/projected/4c8e3fbf-453f-430b-9784-58a85509378b-kube-api-access-8zdfb\") pod \"4c8e3fbf-453f-430b-9784-58a85509378b\" (UID: \"4c8e3fbf-453f-430b-9784-58a85509378b\") " Oct 01 06:31:15 crc kubenswrapper[4747]: I1001 06:31:15.377028 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c8e3fbf-453f-430b-9784-58a85509378b-kube-api-access-8zdfb" (OuterVolumeSpecName: "kube-api-access-8zdfb") pod "4c8e3fbf-453f-430b-9784-58a85509378b" (UID: "4c8e3fbf-453f-430b-9784-58a85509378b"). InnerVolumeSpecName "kube-api-access-8zdfb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:31:15 crc kubenswrapper[4747]: I1001 06:31:15.471329 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8zdfb\" (UniqueName: \"kubernetes.io/projected/4c8e3fbf-453f-430b-9784-58a85509378b-kube-api-access-8zdfb\") on node \"crc\" DevicePath \"\"" Oct 01 06:31:15 crc kubenswrapper[4747]: I1001 06:31:15.817130 4747 generic.go:334] "Generic (PLEG): container finished" podID="4c8e3fbf-453f-430b-9784-58a85509378b" containerID="ea748b242bd359346066ed7cb9fa01cd70d4191ad1b7291d04d34e19b3c364e8" exitCode=0 Oct 01 06:31:15 crc kubenswrapper[4747]: I1001 06:31:15.817192 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-fsdwk" event={"ID":"4c8e3fbf-453f-430b-9784-58a85509378b","Type":"ContainerDied","Data":"ea748b242bd359346066ed7cb9fa01cd70d4191ad1b7291d04d34e19b3c364e8"} Oct 01 06:31:15 crc kubenswrapper[4747]: I1001 06:31:15.817251 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-fsdwk" event={"ID":"4c8e3fbf-453f-430b-9784-58a85509378b","Type":"ContainerDied","Data":"61730ff4b1d39e28f3602768416ff8321fda769b473a77de43d0a6d8abe385c9"} Oct 01 06:31:15 crc kubenswrapper[4747]: I1001 06:31:15.817281 4747 scope.go:117] "RemoveContainer" containerID="ea748b242bd359346066ed7cb9fa01cd70d4191ad1b7291d04d34e19b3c364e8" Oct 01 06:31:15 crc kubenswrapper[4747]: I1001 06:31:15.817838 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-fsdwk" Oct 01 06:31:15 crc kubenswrapper[4747]: I1001 06:31:15.820546 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-nrlsf" event={"ID":"67b6bc26-65f0-403d-be25-31ff044af9a1","Type":"ContainerStarted","Data":"2784751c30349a4f2c8be30cd5b7536220fcd43f49cdfc9a13b8c05cb9ef46d9"} Oct 01 06:31:15 crc kubenswrapper[4747]: I1001 06:31:15.841732 4747 scope.go:117] "RemoveContainer" containerID="ea748b242bd359346066ed7cb9fa01cd70d4191ad1b7291d04d34e19b3c364e8" Oct 01 06:31:15 crc kubenswrapper[4747]: E1001 06:31:15.842267 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea748b242bd359346066ed7cb9fa01cd70d4191ad1b7291d04d34e19b3c364e8\": container with ID starting with ea748b242bd359346066ed7cb9fa01cd70d4191ad1b7291d04d34e19b3c364e8 not found: ID does not exist" containerID="ea748b242bd359346066ed7cb9fa01cd70d4191ad1b7291d04d34e19b3c364e8" Oct 01 06:31:15 crc kubenswrapper[4747]: I1001 06:31:15.842311 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea748b242bd359346066ed7cb9fa01cd70d4191ad1b7291d04d34e19b3c364e8"} err="failed to get container status \"ea748b242bd359346066ed7cb9fa01cd70d4191ad1b7291d04d34e19b3c364e8\": rpc error: code = NotFound desc = could not find container \"ea748b242bd359346066ed7cb9fa01cd70d4191ad1b7291d04d34e19b3c364e8\": container with ID starting with ea748b242bd359346066ed7cb9fa01cd70d4191ad1b7291d04d34e19b3c364e8 not found: ID does not exist" Oct 01 06:31:15 crc kubenswrapper[4747]: I1001 06:31:15.855894 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-index-nrlsf" podStartSLOduration=2.369413873 podStartE2EDuration="2.855859153s" podCreationTimestamp="2025-10-01 06:31:13 +0000 UTC" firstStartedPulling="2025-10-01 06:31:14.662239707 +0000 UTC m=+876.071896766" lastFinishedPulling="2025-10-01 06:31:15.148684977 +0000 UTC m=+876.558342046" observedRunningTime="2025-10-01 06:31:15.841170258 +0000 UTC m=+877.250827387" watchObservedRunningTime="2025-10-01 06:31:15.855859153 +0000 UTC m=+877.265516272" Oct 01 06:31:15 crc kubenswrapper[4747]: I1001 06:31:15.866239 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-fsdwk"] Oct 01 06:31:15 crc kubenswrapper[4747]: I1001 06:31:15.871337 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-index-fsdwk"] Oct 01 06:31:17 crc kubenswrapper[4747]: I1001 06:31:17.290095 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c8e3fbf-453f-430b-9784-58a85509378b" path="/var/lib/kubelet/pods/4c8e3fbf-453f-430b-9784-58a85509378b/volumes" Oct 01 06:31:24 crc kubenswrapper[4747]: I1001 06:31:24.268375 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/keystone-operator-index-nrlsf" Oct 01 06:31:24 crc kubenswrapper[4747]: I1001 06:31:24.268907 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-index-nrlsf" Oct 01 06:31:24 crc kubenswrapper[4747]: I1001 06:31:24.299716 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/keystone-operator-index-nrlsf" Oct 01 06:31:24 crc kubenswrapper[4747]: I1001 06:31:24.934219 4747 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-index-nrlsf" Oct 01 06:31:26 crc kubenswrapper[4747]: I1001 06:31:26.971859 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj"] Oct 01 06:31:26 crc kubenswrapper[4747]: E1001 06:31:26.972138 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c8e3fbf-453f-430b-9784-58a85509378b" containerName="registry-server" Oct 01 06:31:26 crc kubenswrapper[4747]: I1001 06:31:26.972154 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c8e3fbf-453f-430b-9784-58a85509378b" containerName="registry-server" Oct 01 06:31:26 crc kubenswrapper[4747]: I1001 06:31:26.972291 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c8e3fbf-453f-430b-9784-58a85509378b" containerName="registry-server" Oct 01 06:31:26 crc kubenswrapper[4747]: I1001 06:31:26.973366 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj" Oct 01 06:31:26 crc kubenswrapper[4747]: I1001 06:31:26.977278 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-b9vtl" Oct 01 06:31:26 crc kubenswrapper[4747]: I1001 06:31:26.987177 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj"] Oct 01 06:31:27 crc kubenswrapper[4747]: I1001 06:31:27.047435 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nhjc\" (UniqueName: \"kubernetes.io/projected/dcea7572-1f85-4dcc-83b1-4b8dcf554ade-kube-api-access-6nhjc\") pod \"6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj\" (UID: \"dcea7572-1f85-4dcc-83b1-4b8dcf554ade\") " pod="openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj" Oct 01 06:31:27 crc kubenswrapper[4747]: I1001 06:31:27.047565 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dcea7572-1f85-4dcc-83b1-4b8dcf554ade-bundle\") pod \"6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj\" (UID: \"dcea7572-1f85-4dcc-83b1-4b8dcf554ade\") " pod="openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj" Oct 01 06:31:27 crc kubenswrapper[4747]: I1001 06:31:27.047602 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dcea7572-1f85-4dcc-83b1-4b8dcf554ade-util\") pod \"6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj\" (UID: \"dcea7572-1f85-4dcc-83b1-4b8dcf554ade\") " pod="openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj" Oct 01 06:31:27 crc kubenswrapper[4747]: I1001 06:31:27.149058 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nhjc\" (UniqueName: \"kubernetes.io/projected/dcea7572-1f85-4dcc-83b1-4b8dcf554ade-kube-api-access-6nhjc\") pod \"6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj\" (UID: \"dcea7572-1f85-4dcc-83b1-4b8dcf554ade\") " pod="openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj" Oct 01 06:31:27 crc kubenswrapper[4747]: I1001 06:31:27.149212 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dcea7572-1f85-4dcc-83b1-4b8dcf554ade-bundle\") pod \"6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj\" (UID: \"dcea7572-1f85-4dcc-83b1-4b8dcf554ade\") " pod="openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj" Oct 01 06:31:27 crc kubenswrapper[4747]: I1001 06:31:27.149266 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dcea7572-1f85-4dcc-83b1-4b8dcf554ade-util\") pod \"6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj\" (UID: \"dcea7572-1f85-4dcc-83b1-4b8dcf554ade\") " pod="openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj" Oct 01 06:31:27 crc kubenswrapper[4747]: I1001 06:31:27.150023 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dcea7572-1f85-4dcc-83b1-4b8dcf554ade-util\") pod \"6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj\" (UID: \"dcea7572-1f85-4dcc-83b1-4b8dcf554ade\") " pod="openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj" Oct 01 06:31:27 crc kubenswrapper[4747]: I1001 06:31:27.150209 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dcea7572-1f85-4dcc-83b1-4b8dcf554ade-bundle\") pod \"6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj\" (UID: \"dcea7572-1f85-4dcc-83b1-4b8dcf554ade\") " pod="openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj" Oct 01 06:31:27 crc kubenswrapper[4747]: I1001 06:31:27.169797 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nhjc\" (UniqueName: \"kubernetes.io/projected/dcea7572-1f85-4dcc-83b1-4b8dcf554ade-kube-api-access-6nhjc\") pod \"6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj\" (UID: \"dcea7572-1f85-4dcc-83b1-4b8dcf554ade\") " pod="openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj" Oct 01 06:31:27 crc kubenswrapper[4747]: I1001 06:31:27.302731 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj" Oct 01 06:31:27 crc kubenswrapper[4747]: I1001 06:31:27.594489 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj"] Oct 01 06:31:27 crc kubenswrapper[4747]: I1001 06:31:27.932450 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj" event={"ID":"dcea7572-1f85-4dcc-83b1-4b8dcf554ade","Type":"ContainerStarted","Data":"b16adb9b15459248ba68fda96e1c018e92e0e97dc6db22de104bc5a6cc514d28"} Oct 01 06:31:27 crc kubenswrapper[4747]: I1001 06:31:27.933021 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj" event={"ID":"dcea7572-1f85-4dcc-83b1-4b8dcf554ade","Type":"ContainerStarted","Data":"854efa4fdda2d25292b2f0ecd69c54567011d2616ac144a59e28aba583cf40d7"} Oct 01 06:31:28 crc kubenswrapper[4747]: I1001 06:31:28.946094 4747 generic.go:334] "Generic (PLEG): container finished" podID="dcea7572-1f85-4dcc-83b1-4b8dcf554ade" containerID="b16adb9b15459248ba68fda96e1c018e92e0e97dc6db22de104bc5a6cc514d28" exitCode=0 Oct 01 06:31:28 crc kubenswrapper[4747]: I1001 06:31:28.946177 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj" event={"ID":"dcea7572-1f85-4dcc-83b1-4b8dcf554ade","Type":"ContainerDied","Data":"b16adb9b15459248ba68fda96e1c018e92e0e97dc6db22de104bc5a6cc514d28"} Oct 01 06:31:30 crc kubenswrapper[4747]: I1001 06:31:30.968999 4747 generic.go:334] "Generic (PLEG): container finished" podID="dcea7572-1f85-4dcc-83b1-4b8dcf554ade" containerID="64dc6e6d5d1f5bf938d2809e140f04425f75d5fa5c1e0b8df5279a0d74abc89d" exitCode=0 Oct 01 06:31:30 crc kubenswrapper[4747]: I1001 06:31:30.969070 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj" event={"ID":"dcea7572-1f85-4dcc-83b1-4b8dcf554ade","Type":"ContainerDied","Data":"64dc6e6d5d1f5bf938d2809e140f04425f75d5fa5c1e0b8df5279a0d74abc89d"} Oct 01 06:31:31 crc kubenswrapper[4747]: I1001 06:31:31.993324 4747 generic.go:334] "Generic (PLEG): container finished" podID="dcea7572-1f85-4dcc-83b1-4b8dcf554ade" containerID="4243b68c5289fec81b4856846b27aabe2bb8da6812a1bb9bdcd8b74fdb855251" exitCode=0 Oct 01 06:31:31 crc kubenswrapper[4747]: I1001 06:31:31.993680 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj" event={"ID":"dcea7572-1f85-4dcc-83b1-4b8dcf554ade","Type":"ContainerDied","Data":"4243b68c5289fec81b4856846b27aabe2bb8da6812a1bb9bdcd8b74fdb855251"} Oct 01 06:31:33 crc kubenswrapper[4747]: I1001 06:31:33.357069 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj" Oct 01 06:31:33 crc kubenswrapper[4747]: I1001 06:31:33.452521 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6nhjc\" (UniqueName: \"kubernetes.io/projected/dcea7572-1f85-4dcc-83b1-4b8dcf554ade-kube-api-access-6nhjc\") pod \"dcea7572-1f85-4dcc-83b1-4b8dcf554ade\" (UID: \"dcea7572-1f85-4dcc-83b1-4b8dcf554ade\") " Oct 01 06:31:33 crc kubenswrapper[4747]: I1001 06:31:33.452609 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dcea7572-1f85-4dcc-83b1-4b8dcf554ade-bundle\") pod \"dcea7572-1f85-4dcc-83b1-4b8dcf554ade\" (UID: \"dcea7572-1f85-4dcc-83b1-4b8dcf554ade\") " Oct 01 06:31:33 crc kubenswrapper[4747]: I1001 06:31:33.452648 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dcea7572-1f85-4dcc-83b1-4b8dcf554ade-util\") pod \"dcea7572-1f85-4dcc-83b1-4b8dcf554ade\" (UID: \"dcea7572-1f85-4dcc-83b1-4b8dcf554ade\") " Oct 01 06:31:33 crc kubenswrapper[4747]: I1001 06:31:33.453897 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dcea7572-1f85-4dcc-83b1-4b8dcf554ade-bundle" (OuterVolumeSpecName: "bundle") pod "dcea7572-1f85-4dcc-83b1-4b8dcf554ade" (UID: "dcea7572-1f85-4dcc-83b1-4b8dcf554ade"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:31:33 crc kubenswrapper[4747]: I1001 06:31:33.454689 4747 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dcea7572-1f85-4dcc-83b1-4b8dcf554ade-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:31:33 crc kubenswrapper[4747]: I1001 06:31:33.461532 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcea7572-1f85-4dcc-83b1-4b8dcf554ade-kube-api-access-6nhjc" (OuterVolumeSpecName: "kube-api-access-6nhjc") pod "dcea7572-1f85-4dcc-83b1-4b8dcf554ade" (UID: "dcea7572-1f85-4dcc-83b1-4b8dcf554ade"). InnerVolumeSpecName "kube-api-access-6nhjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:31:33 crc kubenswrapper[4747]: I1001 06:31:33.556194 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6nhjc\" (UniqueName: \"kubernetes.io/projected/dcea7572-1f85-4dcc-83b1-4b8dcf554ade-kube-api-access-6nhjc\") on node \"crc\" DevicePath \"\"" Oct 01 06:31:33 crc kubenswrapper[4747]: I1001 06:31:33.581128 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dcea7572-1f85-4dcc-83b1-4b8dcf554ade-util" (OuterVolumeSpecName: "util") pod "dcea7572-1f85-4dcc-83b1-4b8dcf554ade" (UID: "dcea7572-1f85-4dcc-83b1-4b8dcf554ade"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:31:33 crc kubenswrapper[4747]: I1001 06:31:33.658081 4747 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dcea7572-1f85-4dcc-83b1-4b8dcf554ade-util\") on node \"crc\" DevicePath \"\"" Oct 01 06:31:34 crc kubenswrapper[4747]: I1001 06:31:34.020730 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj" event={"ID":"dcea7572-1f85-4dcc-83b1-4b8dcf554ade","Type":"ContainerDied","Data":"854efa4fdda2d25292b2f0ecd69c54567011d2616ac144a59e28aba583cf40d7"} Oct 01 06:31:34 crc kubenswrapper[4747]: I1001 06:31:34.020844 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="854efa4fdda2d25292b2f0ecd69c54567011d2616ac144a59e28aba583cf40d7" Oct 01 06:31:34 crc kubenswrapper[4747]: I1001 06:31:34.020871 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj" Oct 01 06:31:44 crc kubenswrapper[4747]: I1001 06:31:44.621988 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-56d5bb7f9d-8z2pb"] Oct 01 06:31:44 crc kubenswrapper[4747]: E1001 06:31:44.622932 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcea7572-1f85-4dcc-83b1-4b8dcf554ade" containerName="extract" Oct 01 06:31:44 crc kubenswrapper[4747]: I1001 06:31:44.622947 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcea7572-1f85-4dcc-83b1-4b8dcf554ade" containerName="extract" Oct 01 06:31:44 crc kubenswrapper[4747]: E1001 06:31:44.622960 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcea7572-1f85-4dcc-83b1-4b8dcf554ade" containerName="pull" Oct 01 06:31:44 crc kubenswrapper[4747]: I1001 06:31:44.622967 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcea7572-1f85-4dcc-83b1-4b8dcf554ade" containerName="pull" Oct 01 06:31:44 crc kubenswrapper[4747]: E1001 06:31:44.622982 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcea7572-1f85-4dcc-83b1-4b8dcf554ade" containerName="util" Oct 01 06:31:44 crc kubenswrapper[4747]: I1001 06:31:44.622990 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcea7572-1f85-4dcc-83b1-4b8dcf554ade" containerName="util" Oct 01 06:31:44 crc kubenswrapper[4747]: I1001 06:31:44.623157 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcea7572-1f85-4dcc-83b1-4b8dcf554ade" containerName="extract" Oct 01 06:31:44 crc kubenswrapper[4747]: I1001 06:31:44.623988 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-56d5bb7f9d-8z2pb" Oct 01 06:31:44 crc kubenswrapper[4747]: I1001 06:31:44.625699 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-service-cert" Oct 01 06:31:44 crc kubenswrapper[4747]: I1001 06:31:44.625879 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-x62pn" Oct 01 06:31:44 crc kubenswrapper[4747]: I1001 06:31:44.643830 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-56d5bb7f9d-8z2pb"] Oct 01 06:31:44 crc kubenswrapper[4747]: I1001 06:31:44.811155 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/23011ab7-244d-4f51-831e-6a2817fb36d2-apiservice-cert\") pod \"keystone-operator-controller-manager-56d5bb7f9d-8z2pb\" (UID: \"23011ab7-244d-4f51-831e-6a2817fb36d2\") " pod="openstack-operators/keystone-operator-controller-manager-56d5bb7f9d-8z2pb" Oct 01 06:31:44 crc kubenswrapper[4747]: I1001 06:31:44.811208 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/23011ab7-244d-4f51-831e-6a2817fb36d2-webhook-cert\") pod \"keystone-operator-controller-manager-56d5bb7f9d-8z2pb\" (UID: \"23011ab7-244d-4f51-831e-6a2817fb36d2\") " pod="openstack-operators/keystone-operator-controller-manager-56d5bb7f9d-8z2pb" Oct 01 06:31:44 crc kubenswrapper[4747]: I1001 06:31:44.811263 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cljvj\" (UniqueName: \"kubernetes.io/projected/23011ab7-244d-4f51-831e-6a2817fb36d2-kube-api-access-cljvj\") pod \"keystone-operator-controller-manager-56d5bb7f9d-8z2pb\" (UID: \"23011ab7-244d-4f51-831e-6a2817fb36d2\") " pod="openstack-operators/keystone-operator-controller-manager-56d5bb7f9d-8z2pb" Oct 01 06:31:44 crc kubenswrapper[4747]: I1001 06:31:44.912682 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/23011ab7-244d-4f51-831e-6a2817fb36d2-apiservice-cert\") pod \"keystone-operator-controller-manager-56d5bb7f9d-8z2pb\" (UID: \"23011ab7-244d-4f51-831e-6a2817fb36d2\") " pod="openstack-operators/keystone-operator-controller-manager-56d5bb7f9d-8z2pb" Oct 01 06:31:44 crc kubenswrapper[4747]: I1001 06:31:44.912729 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/23011ab7-244d-4f51-831e-6a2817fb36d2-webhook-cert\") pod \"keystone-operator-controller-manager-56d5bb7f9d-8z2pb\" (UID: \"23011ab7-244d-4f51-831e-6a2817fb36d2\") " pod="openstack-operators/keystone-operator-controller-manager-56d5bb7f9d-8z2pb" Oct 01 06:31:44 crc kubenswrapper[4747]: I1001 06:31:44.912786 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cljvj\" (UniqueName: \"kubernetes.io/projected/23011ab7-244d-4f51-831e-6a2817fb36d2-kube-api-access-cljvj\") pod \"keystone-operator-controller-manager-56d5bb7f9d-8z2pb\" (UID: \"23011ab7-244d-4f51-831e-6a2817fb36d2\") " pod="openstack-operators/keystone-operator-controller-manager-56d5bb7f9d-8z2pb" Oct 01 06:31:44 crc kubenswrapper[4747]: I1001 06:31:44.918190 4747 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/23011ab7-244d-4f51-831e-6a2817fb36d2-webhook-cert\") pod \"keystone-operator-controller-manager-56d5bb7f9d-8z2pb\" (UID: \"23011ab7-244d-4f51-831e-6a2817fb36d2\") " pod="openstack-operators/keystone-operator-controller-manager-56d5bb7f9d-8z2pb" Oct 01 06:31:44 crc kubenswrapper[4747]: I1001 06:31:44.918207 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/23011ab7-244d-4f51-831e-6a2817fb36d2-apiservice-cert\") pod \"keystone-operator-controller-manager-56d5bb7f9d-8z2pb\" (UID: \"23011ab7-244d-4f51-831e-6a2817fb36d2\") " pod="openstack-operators/keystone-operator-controller-manager-56d5bb7f9d-8z2pb" Oct 01 06:31:44 crc kubenswrapper[4747]: I1001 06:31:44.927901 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cljvj\" (UniqueName: \"kubernetes.io/projected/23011ab7-244d-4f51-831e-6a2817fb36d2-kube-api-access-cljvj\") pod \"keystone-operator-controller-manager-56d5bb7f9d-8z2pb\" (UID: \"23011ab7-244d-4f51-831e-6a2817fb36d2\") " pod="openstack-operators/keystone-operator-controller-manager-56d5bb7f9d-8z2pb" Oct 01 06:31:44 crc kubenswrapper[4747]: I1001 06:31:44.949138 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-56d5bb7f9d-8z2pb" Oct 01 06:31:45 crc kubenswrapper[4747]: I1001 06:31:45.167574 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-56d5bb7f9d-8z2pb"] Oct 01 06:31:46 crc kubenswrapper[4747]: I1001 06:31:46.114241 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-56d5bb7f9d-8z2pb" event={"ID":"23011ab7-244d-4f51-831e-6a2817fb36d2","Type":"ContainerStarted","Data":"615827edf680d08b19f4ca51f460767d530781dd47aef6791919f133aaf553d0"} Oct 01 06:31:48 crc kubenswrapper[4747]: I1001 06:31:48.132165 4747 generic.go:334] "Generic (PLEG): container finished" podID="265c0df4-e327-42c7-bd89-f88ad59209ec" containerID="6ff56a43b18ae3a8bbc06411062e0b0362fda920d5ce726259440baa11210513" exitCode=0 Oct 01 06:31:48 crc kubenswrapper[4747]: I1001 06:31:48.132334 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/rabbitmq-server-0" event={"ID":"265c0df4-e327-42c7-bd89-f88ad59209ec","Type":"ContainerDied","Data":"6ff56a43b18ae3a8bbc06411062e0b0362fda920d5ce726259440baa11210513"} Oct 01 06:31:49 crc kubenswrapper[4747]: I1001 06:31:49.146461 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/rabbitmq-server-0" event={"ID":"265c0df4-e327-42c7-bd89-f88ad59209ec","Type":"ContainerStarted","Data":"ffb6f14ca09bfa7bab1bbb9f0be15720ebc9974d53eb248aa25214e47863ed8b"} Oct 01 06:31:49 crc kubenswrapper[4747]: I1001 06:31:49.147085 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:31:49 crc kubenswrapper[4747]: I1001 06:31:49.181149 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/rabbitmq-server-0" podStartSLOduration=38.017687696 podStartE2EDuration="43.181132722s" podCreationTimestamp="2025-10-01 06:31:06 +0000 UTC" firstStartedPulling="2025-10-01 06:31:08.18688271 +0000 UTC m=+869.596539759" lastFinishedPulling="2025-10-01 06:31:13.350327736 +0000 UTC m=+874.759984785" 
observedRunningTime="2025-10-01 06:31:49.180709212 +0000 UTC m=+910.590366301" watchObservedRunningTime="2025-10-01 06:31:49.181132722 +0000 UTC m=+910.590789781" Oct 01 06:31:51 crc kubenswrapper[4747]: I1001 06:31:51.160066 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-56d5bb7f9d-8z2pb" event={"ID":"23011ab7-244d-4f51-831e-6a2817fb36d2","Type":"ContainerStarted","Data":"edb3a506c6f73c7200b7051cf61c46e758d43abe49c32021131259a96a631c4a"} Oct 01 06:31:51 crc kubenswrapper[4747]: I1001 06:31:51.160432 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-56d5bb7f9d-8z2pb" event={"ID":"23011ab7-244d-4f51-831e-6a2817fb36d2","Type":"ContainerStarted","Data":"4ef2043885b05ac7f48239cec44a4d19fb628778944311fc1ce3fda92e3ebe34"} Oct 01 06:31:51 crc kubenswrapper[4747]: I1001 06:31:51.160454 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-56d5bb7f9d-8z2pb" Oct 01 06:31:51 crc kubenswrapper[4747]: I1001 06:31:51.175539 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-56d5bb7f9d-8z2pb" podStartSLOduration=1.8672210919999999 podStartE2EDuration="7.17552324s" podCreationTimestamp="2025-10-01 06:31:44 +0000 UTC" firstStartedPulling="2025-10-01 06:31:45.174767431 +0000 UTC m=+906.584424470" lastFinishedPulling="2025-10-01 06:31:50.483069579 +0000 UTC m=+911.892726618" observedRunningTime="2025-10-01 06:31:51.1735804 +0000 UTC m=+912.583237459" watchObservedRunningTime="2025-10-01 06:31:51.17552324 +0000 UTC m=+912.585180289" Oct 01 06:32:04 crc kubenswrapper[4747]: I1001 06:32:04.955019 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-56d5bb7f9d-8z2pb" Oct 01 06:32:06 crc kubenswrapper[4747]: I1001 06:32:06.738698 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/keystone-db-create-hc9dd"] Oct 01 06:32:06 crc kubenswrapper[4747]: I1001 06:32:06.739846 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-db-create-hc9dd" Oct 01 06:32:06 crc kubenswrapper[4747]: I1001 06:32:06.745969 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-db-create-hc9dd"] Oct 01 06:32:06 crc kubenswrapper[4747]: I1001 06:32:06.835207 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrlf8\" (UniqueName: \"kubernetes.io/projected/a71ca0c7-a4a5-4006-ae5b-8061889dd77c-kube-api-access-xrlf8\") pod \"keystone-db-create-hc9dd\" (UID: \"a71ca0c7-a4a5-4006-ae5b-8061889dd77c\") " pod="glance-kuttl-tests/keystone-db-create-hc9dd" Oct 01 06:32:06 crc kubenswrapper[4747]: I1001 06:32:06.936169 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrlf8\" (UniqueName: \"kubernetes.io/projected/a71ca0c7-a4a5-4006-ae5b-8061889dd77c-kube-api-access-xrlf8\") pod \"keystone-db-create-hc9dd\" (UID: \"a71ca0c7-a4a5-4006-ae5b-8061889dd77c\") " pod="glance-kuttl-tests/keystone-db-create-hc9dd" Oct 01 06:32:06 crc kubenswrapper[4747]: I1001 06:32:06.955054 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrlf8\" (UniqueName: \"kubernetes.io/projected/a71ca0c7-a4a5-4006-ae5b-8061889dd77c-kube-api-access-xrlf8\") pod \"keystone-db-create-hc9dd\" (UID: \"a71ca0c7-a4a5-4006-ae5b-8061889dd77c\") " pod="glance-kuttl-tests/keystone-db-create-hc9dd" Oct 01 06:32:07 crc kubenswrapper[4747]: I1001 06:32:07.060869 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-db-create-hc9dd" Oct 01 06:32:07 crc kubenswrapper[4747]: I1001 06:32:07.540389 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-db-create-hc9dd"] Oct 01 06:32:07 crc kubenswrapper[4747]: I1001 06:32:07.738991 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/rabbitmq-server-0" Oct 01 06:32:08 crc kubenswrapper[4747]: I1001 06:32:08.312623 4747 generic.go:334] "Generic (PLEG): container finished" podID="a71ca0c7-a4a5-4006-ae5b-8061889dd77c" containerID="88c8bd70fcef133d9effa0832e5bed9d94af164b8095c8e716dbd98f6ee556a6" exitCode=0 Oct 01 06:32:08 crc kubenswrapper[4747]: I1001 06:32:08.312662 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-db-create-hc9dd" event={"ID":"a71ca0c7-a4a5-4006-ae5b-8061889dd77c","Type":"ContainerDied","Data":"88c8bd70fcef133d9effa0832e5bed9d94af164b8095c8e716dbd98f6ee556a6"} Oct 01 06:32:08 crc kubenswrapper[4747]: I1001 06:32:08.312687 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-db-create-hc9dd" event={"ID":"a71ca0c7-a4a5-4006-ae5b-8061889dd77c","Type":"ContainerStarted","Data":"3fcf7041dcf4f779db2193110624c2ecd7a5741d1da3d8a3d5dae9c8dbe8c607"} Oct 01 06:32:09 crc kubenswrapper[4747]: I1001 06:32:09.643467 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-db-create-hc9dd" Oct 01 06:32:09 crc kubenswrapper[4747]: I1001 06:32:09.716174 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-index-vjvl2"] Oct 01 06:32:09 crc kubenswrapper[4747]: E1001 06:32:09.716992 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a71ca0c7-a4a5-4006-ae5b-8061889dd77c" containerName="mariadb-database-create" Oct 01 06:32:09 crc kubenswrapper[4747]: I1001 06:32:09.717023 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a71ca0c7-a4a5-4006-ae5b-8061889dd77c" containerName="mariadb-database-create" Oct 01 06:32:09 crc kubenswrapper[4747]: I1001 06:32:09.717524 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="a71ca0c7-a4a5-4006-ae5b-8061889dd77c" containerName="mariadb-database-create" Oct 01 06:32:09 crc kubenswrapper[4747]: I1001 06:32:09.721365 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-index-vjvl2" Oct 01 06:32:09 crc kubenswrapper[4747]: I1001 06:32:09.723790 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-index-dockercfg-xklwg" Oct 01 06:32:09 crc kubenswrapper[4747]: I1001 06:32:09.729721 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-index-vjvl2"] Oct 01 06:32:09 crc kubenswrapper[4747]: I1001 06:32:09.772980 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xrlf8\" (UniqueName: \"kubernetes.io/projected/a71ca0c7-a4a5-4006-ae5b-8061889dd77c-kube-api-access-xrlf8\") pod \"a71ca0c7-a4a5-4006-ae5b-8061889dd77c\" (UID: \"a71ca0c7-a4a5-4006-ae5b-8061889dd77c\") " Oct 01 06:32:09 crc kubenswrapper[4747]: I1001 06:32:09.773554 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vng5s\" (UniqueName: \"kubernetes.io/projected/397f88ec-9647-4874-8652-573ee0729791-kube-api-access-vng5s\") pod \"horizon-operator-index-vjvl2\" (UID: \"397f88ec-9647-4874-8652-573ee0729791\") " pod="openstack-operators/horizon-operator-index-vjvl2" Oct 01 06:32:09 crc kubenswrapper[4747]: I1001 06:32:09.783108 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a71ca0c7-a4a5-4006-ae5b-8061889dd77c-kube-api-access-xrlf8" (OuterVolumeSpecName: "kube-api-access-xrlf8") pod "a71ca0c7-a4a5-4006-ae5b-8061889dd77c" (UID: "a71ca0c7-a4a5-4006-ae5b-8061889dd77c"). InnerVolumeSpecName "kube-api-access-xrlf8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:32:09 crc kubenswrapper[4747]: I1001 06:32:09.874630 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vng5s\" (UniqueName: \"kubernetes.io/projected/397f88ec-9647-4874-8652-573ee0729791-kube-api-access-vng5s\") pod \"horizon-operator-index-vjvl2\" (UID: \"397f88ec-9647-4874-8652-573ee0729791\") " pod="openstack-operators/horizon-operator-index-vjvl2" Oct 01 06:32:09 crc kubenswrapper[4747]: I1001 06:32:09.874737 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xrlf8\" (UniqueName: \"kubernetes.io/projected/a71ca0c7-a4a5-4006-ae5b-8061889dd77c-kube-api-access-xrlf8\") on node \"crc\" DevicePath \"\"" Oct 01 06:32:09 crc kubenswrapper[4747]: I1001 06:32:09.901145 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vng5s\" (UniqueName: \"kubernetes.io/projected/397f88ec-9647-4874-8652-573ee0729791-kube-api-access-vng5s\") pod \"horizon-operator-index-vjvl2\" (UID: \"397f88ec-9647-4874-8652-573ee0729791\") " pod="openstack-operators/horizon-operator-index-vjvl2" Oct 01 06:32:10 crc kubenswrapper[4747]: I1001 06:32:10.052278 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-index-vjvl2" Oct 01 06:32:10 crc kubenswrapper[4747]: I1001 06:32:10.330859 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-db-create-hc9dd" event={"ID":"a71ca0c7-a4a5-4006-ae5b-8061889dd77c","Type":"ContainerDied","Data":"3fcf7041dcf4f779db2193110624c2ecd7a5741d1da3d8a3d5dae9c8dbe8c607"} Oct 01 06:32:10 crc kubenswrapper[4747]: I1001 06:32:10.330896 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3fcf7041dcf4f779db2193110624c2ecd7a5741d1da3d8a3d5dae9c8dbe8c607" Oct 01 06:32:10 crc kubenswrapper[4747]: I1001 06:32:10.330969 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-db-create-hc9dd" Oct 01 06:32:10 crc kubenswrapper[4747]: I1001 06:32:10.521548 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-index-vjvl2"] Oct 01 06:32:10 crc kubenswrapper[4747]: W1001 06:32:10.545291 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod397f88ec_9647_4874_8652_573ee0729791.slice/crio-965fca47c5ac7774de1b79ba1cb5ff41048956c6d2abbe76991562947f351cfa WatchSource:0}: Error finding container 965fca47c5ac7774de1b79ba1cb5ff41048956c6d2abbe76991562947f351cfa: Status 404 returned error can't find the container with id 965fca47c5ac7774de1b79ba1cb5ff41048956c6d2abbe76991562947f351cfa Oct 01 06:32:11 crc kubenswrapper[4747]: I1001 06:32:11.339971 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-vjvl2" event={"ID":"397f88ec-9647-4874-8652-573ee0729791","Type":"ContainerStarted","Data":"965fca47c5ac7774de1b79ba1cb5ff41048956c6d2abbe76991562947f351cfa"} Oct 01 06:32:12 crc kubenswrapper[4747]: I1001 06:32:12.715654 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-index-mk8pd"] Oct 01 06:32:12 crc kubenswrapper[4747]: I1001 06:32:12.718128 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-index-mk8pd" Oct 01 06:32:12 crc kubenswrapper[4747]: I1001 06:32:12.721136 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-index-dockercfg-qm2vq" Oct 01 06:32:12 crc kubenswrapper[4747]: I1001 06:32:12.726442 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-index-mk8pd"] Oct 01 06:32:12 crc kubenswrapper[4747]: I1001 06:32:12.834248 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cf89s\" (UniqueName: \"kubernetes.io/projected/344442ff-e0f2-437f-806c-62c39d7d5aaf-kube-api-access-cf89s\") pod \"swift-operator-index-mk8pd\" (UID: \"344442ff-e0f2-437f-806c-62c39d7d5aaf\") " pod="openstack-operators/swift-operator-index-mk8pd" Oct 01 06:32:12 crc kubenswrapper[4747]: I1001 06:32:12.936631 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cf89s\" (UniqueName: \"kubernetes.io/projected/344442ff-e0f2-437f-806c-62c39d7d5aaf-kube-api-access-cf89s\") pod \"swift-operator-index-mk8pd\" (UID: \"344442ff-e0f2-437f-806c-62c39d7d5aaf\") " pod="openstack-operators/swift-operator-index-mk8pd" Oct 01 06:32:12 crc kubenswrapper[4747]: I1001 06:32:12.971803 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cf89s\" (UniqueName: \"kubernetes.io/projected/344442ff-e0f2-437f-806c-62c39d7d5aaf-kube-api-access-cf89s\") pod \"swift-operator-index-mk8pd\" (UID: \"344442ff-e0f2-437f-806c-62c39d7d5aaf\") " pod="openstack-operators/swift-operator-index-mk8pd" Oct 01 06:32:13 crc kubenswrapper[4747]: I1001 06:32:13.045072 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-index-mk8pd" Oct 01 06:32:13 crc kubenswrapper[4747]: I1001 06:32:13.337178 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-index-mk8pd"] Oct 01 06:32:13 crc kubenswrapper[4747]: I1001 06:32:13.354820 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-vjvl2" event={"ID":"397f88ec-9647-4874-8652-573ee0729791","Type":"ContainerStarted","Data":"167308006ffd227ec00db5a26e359f88c8b096f599539dd4d09be5d69e1cbf66"} Oct 01 06:32:13 crc kubenswrapper[4747]: I1001 06:32:13.372913 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-index-vjvl2" podStartSLOduration=2.543220698 podStartE2EDuration="4.372896456s" podCreationTimestamp="2025-10-01 06:32:09 +0000 UTC" firstStartedPulling="2025-10-01 06:32:10.549147569 +0000 UTC m=+931.958804618" lastFinishedPulling="2025-10-01 06:32:12.378823287 +0000 UTC m=+933.788480376" observedRunningTime="2025-10-01 06:32:13.369603513 +0000 UTC m=+934.779260582" watchObservedRunningTime="2025-10-01 06:32:13.372896456 +0000 UTC m=+934.782553515" Oct 01 06:32:14 crc kubenswrapper[4747]: I1001 06:32:14.365122 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-mk8pd" event={"ID":"344442ff-e0f2-437f-806c-62c39d7d5aaf","Type":"ContainerStarted","Data":"199a9f35b74563dd786192af19190ff21626f057a2aef47c9208f5cce8003bd2"} Oct 01 06:32:14 crc kubenswrapper[4747]: I1001 06:32:14.902357 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/horizon-operator-index-vjvl2"] Oct 01 06:32:15 crc kubenswrapper[4747]: I1001 06:32:15.373311 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-index-mk8pd" event={"ID":"344442ff-e0f2-437f-806c-62c39d7d5aaf","Type":"ContainerStarted","Data":"548d5cc51ff752e282cc8945b782c3ff13aa355be4330e6034f0471d3b57aede"} Oct 01 06:32:15 crc kubenswrapper[4747]: I1001 06:32:15.373674 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/horizon-operator-index-vjvl2" podUID="397f88ec-9647-4874-8652-573ee0729791" containerName="registry-server" containerID="cri-o://167308006ffd227ec00db5a26e359f88c8b096f599539dd4d09be5d69e1cbf66" gracePeriod=2 Oct 01 06:32:15 crc kubenswrapper[4747]: I1001 06:32:15.387560 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-index-mk8pd" podStartSLOduration=1.524905951 podStartE2EDuration="3.38753995s" podCreationTimestamp="2025-10-01 06:32:12 +0000 UTC" firstStartedPulling="2025-10-01 06:32:13.350916677 +0000 UTC m=+934.760573736" lastFinishedPulling="2025-10-01 06:32:15.213550686 +0000 UTC m=+936.623207735" observedRunningTime="2025-10-01 06:32:15.385169011 +0000 UTC m=+936.794826060" watchObservedRunningTime="2025-10-01 06:32:15.38753995 +0000 UTC m=+936.797197029" Oct 01 06:32:15 crc kubenswrapper[4747]: I1001 06:32:15.708872 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-index-llv5t"] Oct 01 06:32:15 crc kubenswrapper[4747]: I1001 06:32:15.711008 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-index-llv5t" Oct 01 06:32:15 crc kubenswrapper[4747]: I1001 06:32:15.721325 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-index-llv5t"] Oct 01 06:32:15 crc kubenswrapper[4747]: I1001 06:32:15.775740 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmlh4\" (UniqueName: \"kubernetes.io/projected/06748093-f73a-4ea5-a452-3b18dc0a9581-kube-api-access-mmlh4\") pod \"horizon-operator-index-llv5t\" (UID: \"06748093-f73a-4ea5-a452-3b18dc0a9581\") " pod="openstack-operators/horizon-operator-index-llv5t" Oct 01 06:32:15 crc kubenswrapper[4747]: I1001 06:32:15.791427 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-index-vjvl2" Oct 01 06:32:15 crc kubenswrapper[4747]: I1001 06:32:15.878632 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vng5s\" (UniqueName: \"kubernetes.io/projected/397f88ec-9647-4874-8652-573ee0729791-kube-api-access-vng5s\") pod \"397f88ec-9647-4874-8652-573ee0729791\" (UID: \"397f88ec-9647-4874-8652-573ee0729791\") " Oct 01 06:32:15 crc kubenswrapper[4747]: I1001 06:32:15.878868 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmlh4\" (UniqueName: \"kubernetes.io/projected/06748093-f73a-4ea5-a452-3b18dc0a9581-kube-api-access-mmlh4\") pod \"horizon-operator-index-llv5t\" (UID: \"06748093-f73a-4ea5-a452-3b18dc0a9581\") " pod="openstack-operators/horizon-operator-index-llv5t" Oct 01 06:32:15 crc kubenswrapper[4747]: I1001 06:32:15.887084 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/397f88ec-9647-4874-8652-573ee0729791-kube-api-access-vng5s" (OuterVolumeSpecName: "kube-api-access-vng5s") pod "397f88ec-9647-4874-8652-573ee0729791" (UID: "397f88ec-9647-4874-8652-573ee0729791"). InnerVolumeSpecName "kube-api-access-vng5s". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:32:15 crc kubenswrapper[4747]: I1001 06:32:15.907046 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmlh4\" (UniqueName: \"kubernetes.io/projected/06748093-f73a-4ea5-a452-3b18dc0a9581-kube-api-access-mmlh4\") pod \"horizon-operator-index-llv5t\" (UID: \"06748093-f73a-4ea5-a452-3b18dc0a9581\") " pod="openstack-operators/horizon-operator-index-llv5t" Oct 01 06:32:15 crc kubenswrapper[4747]: I1001 06:32:15.980987 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vng5s\" (UniqueName: \"kubernetes.io/projected/397f88ec-9647-4874-8652-573ee0729791-kube-api-access-vng5s\") on node \"crc\" DevicePath \"\"" Oct 01 06:32:16 crc kubenswrapper[4747]: I1001 06:32:16.033403 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-index-llv5t" Oct 01 06:32:16 crc kubenswrapper[4747]: I1001 06:32:16.383082 4747 generic.go:334] "Generic (PLEG): container finished" podID="397f88ec-9647-4874-8652-573ee0729791" containerID="167308006ffd227ec00db5a26e359f88c8b096f599539dd4d09be5d69e1cbf66" exitCode=0 Oct 01 06:32:16 crc kubenswrapper[4747]: I1001 06:32:16.383168 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-index-vjvl2" Oct 01 06:32:16 crc kubenswrapper[4747]: I1001 06:32:16.383234 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-vjvl2" event={"ID":"397f88ec-9647-4874-8652-573ee0729791","Type":"ContainerDied","Data":"167308006ffd227ec00db5a26e359f88c8b096f599539dd4d09be5d69e1cbf66"} Oct 01 06:32:16 crc kubenswrapper[4747]: I1001 06:32:16.383474 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-vjvl2" event={"ID":"397f88ec-9647-4874-8652-573ee0729791","Type":"ContainerDied","Data":"965fca47c5ac7774de1b79ba1cb5ff41048956c6d2abbe76991562947f351cfa"} Oct 01 06:32:16 crc kubenswrapper[4747]: I1001 06:32:16.383592 4747 scope.go:117] "RemoveContainer" containerID="167308006ffd227ec00db5a26e359f88c8b096f599539dd4d09be5d69e1cbf66" Oct 01 06:32:16 crc kubenswrapper[4747]: I1001 06:32:16.415499 4747 scope.go:117] "RemoveContainer" containerID="167308006ffd227ec00db5a26e359f88c8b096f599539dd4d09be5d69e1cbf66" Oct 01 06:32:16 crc kubenswrapper[4747]: E1001 06:32:16.416046 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"167308006ffd227ec00db5a26e359f88c8b096f599539dd4d09be5d69e1cbf66\": container with ID starting with 167308006ffd227ec00db5a26e359f88c8b096f599539dd4d09be5d69e1cbf66 not found: ID does not exist" containerID="167308006ffd227ec00db5a26e359f88c8b096f599539dd4d09be5d69e1cbf66" Oct 01 06:32:16 crc kubenswrapper[4747]: I1001 06:32:16.416113 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"167308006ffd227ec00db5a26e359f88c8b096f599539dd4d09be5d69e1cbf66"} err="failed to get container status \"167308006ffd227ec00db5a26e359f88c8b096f599539dd4d09be5d69e1cbf66\": rpc error: code = NotFound desc = could not find container \"167308006ffd227ec00db5a26e359f88c8b096f599539dd4d09be5d69e1cbf66\": container with ID starting with 167308006ffd227ec00db5a26e359f88c8b096f599539dd4d09be5d69e1cbf66 not found: ID does not exist" Oct 01 06:32:16 crc kubenswrapper[4747]: I1001 06:32:16.416943 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/horizon-operator-index-vjvl2"] Oct 01 06:32:16 crc kubenswrapper[4747]: I1001 06:32:16.421443 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/horizon-operator-index-vjvl2"] Oct 01 06:32:16 crc kubenswrapper[4747]: I1001 06:32:16.564298 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-index-llv5t"] Oct 01 06:32:16 crc kubenswrapper[4747]: W1001 06:32:16.570462 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod06748093_f73a_4ea5_a452_3b18dc0a9581.slice/crio-42c41af901fc0025d486884282db943183d46a3cb48e1993e3f8d9710e5d9377 WatchSource:0}: Error finding container 42c41af901fc0025d486884282db943183d46a3cb48e1993e3f8d9710e5d9377: Status 404 returned error can't find the container with id 42c41af901fc0025d486884282db943183d46a3cb48e1993e3f8d9710e5d9377 Oct 01 06:32:16 crc kubenswrapper[4747]: I1001 06:32:16.652152 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/keystone-9204-account-create-d75b4"] Oct 01 06:32:16 crc kubenswrapper[4747]: E1001 06:32:16.652450 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="397f88ec-9647-4874-8652-573ee0729791" 
containerName="registry-server" Oct 01 06:32:16 crc kubenswrapper[4747]: I1001 06:32:16.652471 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="397f88ec-9647-4874-8652-573ee0729791" containerName="registry-server" Oct 01 06:32:16 crc kubenswrapper[4747]: I1001 06:32:16.652617 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="397f88ec-9647-4874-8652-573ee0729791" containerName="registry-server" Oct 01 06:32:16 crc kubenswrapper[4747]: I1001 06:32:16.653148 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-9204-account-create-d75b4" Oct 01 06:32:16 crc kubenswrapper[4747]: I1001 06:32:16.655136 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-db-secret" Oct 01 06:32:16 crc kubenswrapper[4747]: I1001 06:32:16.661274 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-9204-account-create-d75b4"] Oct 01 06:32:16 crc kubenswrapper[4747]: I1001 06:32:16.795791 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47mzw\" (UniqueName: \"kubernetes.io/projected/4d4143ef-6665-4852-89f3-735b1952cd3c-kube-api-access-47mzw\") pod \"keystone-9204-account-create-d75b4\" (UID: \"4d4143ef-6665-4852-89f3-735b1952cd3c\") " pod="glance-kuttl-tests/keystone-9204-account-create-d75b4" Oct 01 06:32:16 crc kubenswrapper[4747]: I1001 06:32:16.897841 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47mzw\" (UniqueName: \"kubernetes.io/projected/4d4143ef-6665-4852-89f3-735b1952cd3c-kube-api-access-47mzw\") pod \"keystone-9204-account-create-d75b4\" (UID: \"4d4143ef-6665-4852-89f3-735b1952cd3c\") " pod="glance-kuttl-tests/keystone-9204-account-create-d75b4" Oct 01 06:32:16 crc kubenswrapper[4747]: I1001 06:32:16.938535 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47mzw\" (UniqueName: \"kubernetes.io/projected/4d4143ef-6665-4852-89f3-735b1952cd3c-kube-api-access-47mzw\") pod \"keystone-9204-account-create-d75b4\" (UID: \"4d4143ef-6665-4852-89f3-735b1952cd3c\") " pod="glance-kuttl-tests/keystone-9204-account-create-d75b4" Oct 01 06:32:16 crc kubenswrapper[4747]: I1001 06:32:16.967702 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-9204-account-create-d75b4" Oct 01 06:32:17 crc kubenswrapper[4747]: I1001 06:32:17.292006 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="397f88ec-9647-4874-8652-573ee0729791" path="/var/lib/kubelet/pods/397f88ec-9647-4874-8652-573ee0729791/volumes" Oct 01 06:32:17 crc kubenswrapper[4747]: I1001 06:32:17.395054 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-llv5t" event={"ID":"06748093-f73a-4ea5-a452-3b18dc0a9581","Type":"ContainerStarted","Data":"66fecc9939728ebbe3dae1a02947d65f7e77e9909f732f6c302999865020a34d"} Oct 01 06:32:17 crc kubenswrapper[4747]: I1001 06:32:17.395487 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-llv5t" event={"ID":"06748093-f73a-4ea5-a452-3b18dc0a9581","Type":"ContainerStarted","Data":"42c41af901fc0025d486884282db943183d46a3cb48e1993e3f8d9710e5d9377"} Oct 01 06:32:17 crc kubenswrapper[4747]: I1001 06:32:17.428556 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-index-llv5t" podStartSLOduration=1.930860391 podStartE2EDuration="2.428524216s" podCreationTimestamp="2025-10-01 06:32:15 +0000 UTC" firstStartedPulling="2025-10-01 06:32:16.574266951 +0000 UTC m=+937.983924010" lastFinishedPulling="2025-10-01 06:32:17.071930746 +0000 UTC m=+938.481587835" observedRunningTime="2025-10-01 06:32:17.41729649 +0000 UTC m=+938.826953589" watchObservedRunningTime="2025-10-01 06:32:17.428524216 +0000 UTC m=+938.838181305" Oct 01 06:32:17 crc kubenswrapper[4747]: I1001 06:32:17.439047 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-9204-account-create-d75b4"] Oct 01 06:32:17 crc kubenswrapper[4747]: W1001 06:32:17.439736 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4d4143ef_6665_4852_89f3_735b1952cd3c.slice/crio-04ab721f1fffb83d2ca0fae9738516f9311172a56f11feaa5cd39bfe11f73117 WatchSource:0}: Error finding container 04ab721f1fffb83d2ca0fae9738516f9311172a56f11feaa5cd39bfe11f73117: Status 404 returned error can't find the container with id 04ab721f1fffb83d2ca0fae9738516f9311172a56f11feaa5cd39bfe11f73117 Oct 01 06:32:18 crc kubenswrapper[4747]: I1001 06:32:18.410558 4747 generic.go:334] "Generic (PLEG): container finished" podID="4d4143ef-6665-4852-89f3-735b1952cd3c" containerID="423a8523ab448d99b2ca82a8173331cf13833fb1e8a0266a0d5d4dcc042a858f" exitCode=0 Oct 01 06:32:18 crc kubenswrapper[4747]: I1001 06:32:18.410633 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-9204-account-create-d75b4" event={"ID":"4d4143ef-6665-4852-89f3-735b1952cd3c","Type":"ContainerDied","Data":"423a8523ab448d99b2ca82a8173331cf13833fb1e8a0266a0d5d4dcc042a858f"} Oct 01 06:32:18 crc kubenswrapper[4747]: I1001 06:32:18.410948 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-9204-account-create-d75b4" event={"ID":"4d4143ef-6665-4852-89f3-735b1952cd3c","Type":"ContainerStarted","Data":"04ab721f1fffb83d2ca0fae9738516f9311172a56f11feaa5cd39bfe11f73117"} Oct 01 06:32:19 crc kubenswrapper[4747]: I1001 06:32:19.748601 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-9204-account-create-d75b4" Oct 01 06:32:19 crc kubenswrapper[4747]: I1001 06:32:19.844197 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-47mzw\" (UniqueName: \"kubernetes.io/projected/4d4143ef-6665-4852-89f3-735b1952cd3c-kube-api-access-47mzw\") pod \"4d4143ef-6665-4852-89f3-735b1952cd3c\" (UID: \"4d4143ef-6665-4852-89f3-735b1952cd3c\") " Oct 01 06:32:19 crc kubenswrapper[4747]: I1001 06:32:19.852598 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d4143ef-6665-4852-89f3-735b1952cd3c-kube-api-access-47mzw" (OuterVolumeSpecName: "kube-api-access-47mzw") pod "4d4143ef-6665-4852-89f3-735b1952cd3c" (UID: "4d4143ef-6665-4852-89f3-735b1952cd3c"). InnerVolumeSpecName "kube-api-access-47mzw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:32:19 crc kubenswrapper[4747]: I1001 06:32:19.946419 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-47mzw\" (UniqueName: \"kubernetes.io/projected/4d4143ef-6665-4852-89f3-735b1952cd3c-kube-api-access-47mzw\") on node \"crc\" DevicePath \"\"" Oct 01 06:32:20 crc kubenswrapper[4747]: I1001 06:32:20.429157 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-9204-account-create-d75b4" event={"ID":"4d4143ef-6665-4852-89f3-735b1952cd3c","Type":"ContainerDied","Data":"04ab721f1fffb83d2ca0fae9738516f9311172a56f11feaa5cd39bfe11f73117"} Oct 01 06:32:20 crc kubenswrapper[4747]: I1001 06:32:20.429229 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="04ab721f1fffb83d2ca0fae9738516f9311172a56f11feaa5cd39bfe11f73117" Oct 01 06:32:20 crc kubenswrapper[4747]: I1001 06:32:20.429248 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-9204-account-create-d75b4" Oct 01 06:32:22 crc kubenswrapper[4747]: I1001 06:32:22.142467 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/keystone-db-sync-4wbfs"] Oct 01 06:32:22 crc kubenswrapper[4747]: E1001 06:32:22.143148 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d4143ef-6665-4852-89f3-735b1952cd3c" containerName="mariadb-account-create" Oct 01 06:32:22 crc kubenswrapper[4747]: I1001 06:32:22.143168 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d4143ef-6665-4852-89f3-735b1952cd3c" containerName="mariadb-account-create" Oct 01 06:32:22 crc kubenswrapper[4747]: I1001 06:32:22.143380 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d4143ef-6665-4852-89f3-735b1952cd3c" containerName="mariadb-account-create" Oct 01 06:32:22 crc kubenswrapper[4747]: I1001 06:32:22.144100 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-db-sync-4wbfs" Oct 01 06:32:22 crc kubenswrapper[4747]: I1001 06:32:22.146607 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-config-data" Oct 01 06:32:22 crc kubenswrapper[4747]: I1001 06:32:22.147259 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-keystone-dockercfg-qdkq2" Oct 01 06:32:22 crc kubenswrapper[4747]: I1001 06:32:22.148085 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-scripts" Oct 01 06:32:22 crc kubenswrapper[4747]: I1001 06:32:22.149500 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone" Oct 01 06:32:22 crc kubenswrapper[4747]: I1001 06:32:22.156283 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-db-sync-4wbfs"] Oct 01 06:32:22 crc kubenswrapper[4747]: I1001 06:32:22.286388 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sj7l\" (UniqueName: \"kubernetes.io/projected/274c38e7-1453-412a-9751-fd095df2157b-kube-api-access-7sj7l\") pod \"keystone-db-sync-4wbfs\" (UID: \"274c38e7-1453-412a-9751-fd095df2157b\") " pod="glance-kuttl-tests/keystone-db-sync-4wbfs" Oct 01 06:32:22 crc kubenswrapper[4747]: I1001 06:32:22.286508 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/274c38e7-1453-412a-9751-fd095df2157b-config-data\") pod \"keystone-db-sync-4wbfs\" (UID: \"274c38e7-1453-412a-9751-fd095df2157b\") " pod="glance-kuttl-tests/keystone-db-sync-4wbfs" Oct 01 06:32:22 crc kubenswrapper[4747]: I1001 06:32:22.388255 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/274c38e7-1453-412a-9751-fd095df2157b-config-data\") pod \"keystone-db-sync-4wbfs\" (UID: \"274c38e7-1453-412a-9751-fd095df2157b\") " pod="glance-kuttl-tests/keystone-db-sync-4wbfs" Oct 01 06:32:22 crc kubenswrapper[4747]: I1001 06:32:22.388472 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sj7l\" (UniqueName: \"kubernetes.io/projected/274c38e7-1453-412a-9751-fd095df2157b-kube-api-access-7sj7l\") pod \"keystone-db-sync-4wbfs\" (UID: \"274c38e7-1453-412a-9751-fd095df2157b\") " pod="glance-kuttl-tests/keystone-db-sync-4wbfs" Oct 01 06:32:22 crc kubenswrapper[4747]: I1001 06:32:22.397095 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/274c38e7-1453-412a-9751-fd095df2157b-config-data\") pod \"keystone-db-sync-4wbfs\" (UID: \"274c38e7-1453-412a-9751-fd095df2157b\") " pod="glance-kuttl-tests/keystone-db-sync-4wbfs" Oct 01 06:32:22 crc kubenswrapper[4747]: I1001 06:32:22.418004 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sj7l\" (UniqueName: \"kubernetes.io/projected/274c38e7-1453-412a-9751-fd095df2157b-kube-api-access-7sj7l\") pod \"keystone-db-sync-4wbfs\" (UID: \"274c38e7-1453-412a-9751-fd095df2157b\") " pod="glance-kuttl-tests/keystone-db-sync-4wbfs" Oct 01 06:32:22 crc kubenswrapper[4747]: I1001 06:32:22.474980 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-db-sync-4wbfs" Oct 01 06:32:22 crc kubenswrapper[4747]: I1001 06:32:22.959316 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-db-sync-4wbfs"] Oct 01 06:32:22 crc kubenswrapper[4747]: I1001 06:32:22.965723 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 06:32:23 crc kubenswrapper[4747]: I1001 06:32:23.045661 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-index-mk8pd" Oct 01 06:32:23 crc kubenswrapper[4747]: I1001 06:32:23.045717 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/swift-operator-index-mk8pd" Oct 01 06:32:23 crc kubenswrapper[4747]: I1001 06:32:23.082640 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/swift-operator-index-mk8pd" Oct 01 06:32:23 crc kubenswrapper[4747]: I1001 06:32:23.454290 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-db-sync-4wbfs" event={"ID":"274c38e7-1453-412a-9751-fd095df2157b","Type":"ContainerStarted","Data":"dc9a72ed3df4213438cfc3ae5965d81b037ba117bfd4ea3a4dd0d80c6c990b11"} Oct 01 06:32:23 crc kubenswrapper[4747]: I1001 06:32:23.538454 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-index-mk8pd" Oct 01 06:32:26 crc kubenswrapper[4747]: I1001 06:32:26.033820 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-index-llv5t" Oct 01 06:32:26 crc kubenswrapper[4747]: I1001 06:32:26.034175 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/horizon-operator-index-llv5t" Oct 01 06:32:26 crc kubenswrapper[4747]: I1001 06:32:26.076519 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/horizon-operator-index-llv5t" Oct 01 06:32:26 crc kubenswrapper[4747]: I1001 06:32:26.508841 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-index-llv5t" Oct 01 06:32:30 crc kubenswrapper[4747]: I1001 06:32:30.515958 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-db-sync-4wbfs" event={"ID":"274c38e7-1453-412a-9751-fd095df2157b","Type":"ContainerStarted","Data":"6506687389c71fc9bfa0d963c1a142e31e211dc4e1a46ad569fa14e32bc6c970"} Oct 01 06:32:30 crc kubenswrapper[4747]: I1001 06:32:30.547270 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/keystone-db-sync-4wbfs" podStartSLOduration=1.715495363 podStartE2EDuration="8.547237736s" podCreationTimestamp="2025-10-01 06:32:22 +0000 UTC" firstStartedPulling="2025-10-01 06:32:22.965422043 +0000 UTC m=+944.375079092" lastFinishedPulling="2025-10-01 06:32:29.797164386 +0000 UTC m=+951.206821465" observedRunningTime="2025-10-01 06:32:30.539374515 +0000 UTC m=+951.949031624" watchObservedRunningTime="2025-10-01 06:32:30.547237736 +0000 UTC m=+951.956894865" Oct 01 06:32:31 crc kubenswrapper[4747]: I1001 06:32:31.762433 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v"] Oct 01 06:32:31 crc kubenswrapper[4747]: I1001 06:32:31.765395 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v" Oct 01 06:32:31 crc kubenswrapper[4747]: I1001 06:32:31.769689 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-b9vtl" Oct 01 06:32:31 crc kubenswrapper[4747]: I1001 06:32:31.782014 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v"] Oct 01 06:32:31 crc kubenswrapper[4747]: I1001 06:32:31.841448 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a45e1f7-8e3a-4628-95fa-1e0d77b95217-bundle\") pod \"ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v\" (UID: \"4a45e1f7-8e3a-4628-95fa-1e0d77b95217\") " pod="openstack-operators/ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v" Oct 01 06:32:31 crc kubenswrapper[4747]: I1001 06:32:31.841646 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxplg\" (UniqueName: \"kubernetes.io/projected/4a45e1f7-8e3a-4628-95fa-1e0d77b95217-kube-api-access-xxplg\") pod \"ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v\" (UID: \"4a45e1f7-8e3a-4628-95fa-1e0d77b95217\") " pod="openstack-operators/ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v" Oct 01 06:32:31 crc kubenswrapper[4747]: I1001 06:32:31.841715 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a45e1f7-8e3a-4628-95fa-1e0d77b95217-util\") pod \"ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v\" (UID: \"4a45e1f7-8e3a-4628-95fa-1e0d77b95217\") " pod="openstack-operators/ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v" Oct 01 06:32:31 crc kubenswrapper[4747]: I1001 06:32:31.943924 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxplg\" (UniqueName: \"kubernetes.io/projected/4a45e1f7-8e3a-4628-95fa-1e0d77b95217-kube-api-access-xxplg\") pod \"ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v\" (UID: \"4a45e1f7-8e3a-4628-95fa-1e0d77b95217\") " pod="openstack-operators/ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v" Oct 01 06:32:31 crc kubenswrapper[4747]: I1001 06:32:31.944047 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a45e1f7-8e3a-4628-95fa-1e0d77b95217-util\") pod \"ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v\" (UID: \"4a45e1f7-8e3a-4628-95fa-1e0d77b95217\") " pod="openstack-operators/ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v" Oct 01 06:32:31 crc kubenswrapper[4747]: I1001 06:32:31.944268 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a45e1f7-8e3a-4628-95fa-1e0d77b95217-bundle\") pod \"ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v\" (UID: \"4a45e1f7-8e3a-4628-95fa-1e0d77b95217\") " pod="openstack-operators/ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v" Oct 01 06:32:31 crc kubenswrapper[4747]: I1001 06:32:31.945064 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/4a45e1f7-8e3a-4628-95fa-1e0d77b95217-util\") pod \"ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v\" (UID: \"4a45e1f7-8e3a-4628-95fa-1e0d77b95217\") " pod="openstack-operators/ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v" Oct 01 06:32:31 crc kubenswrapper[4747]: I1001 06:32:31.945150 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a45e1f7-8e3a-4628-95fa-1e0d77b95217-bundle\") pod \"ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v\" (UID: \"4a45e1f7-8e3a-4628-95fa-1e0d77b95217\") " pod="openstack-operators/ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v" Oct 01 06:32:31 crc kubenswrapper[4747]: I1001 06:32:31.965306 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxplg\" (UniqueName: \"kubernetes.io/projected/4a45e1f7-8e3a-4628-95fa-1e0d77b95217-kube-api-access-xxplg\") pod \"ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v\" (UID: \"4a45e1f7-8e3a-4628-95fa-1e0d77b95217\") " pod="openstack-operators/ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v" Oct 01 06:32:32 crc kubenswrapper[4747]: I1001 06:32:32.106364 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v" Oct 01 06:32:32 crc kubenswrapper[4747]: I1001 06:32:32.586275 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v"] Oct 01 06:32:32 crc kubenswrapper[4747]: W1001 06:32:32.590217 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4a45e1f7_8e3a_4628_95fa_1e0d77b95217.slice/crio-21d377b204f6d77686444ccb05061ca4234c50d5dfc30ba8a130f6588659778c WatchSource:0}: Error finding container 21d377b204f6d77686444ccb05061ca4234c50d5dfc30ba8a130f6588659778c: Status 404 returned error can't find the container with id 21d377b204f6d77686444ccb05061ca4234c50d5dfc30ba8a130f6588659778c Oct 01 06:32:32 crc kubenswrapper[4747]: I1001 06:32:32.742025 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw"] Oct 01 06:32:32 crc kubenswrapper[4747]: I1001 06:32:32.744776 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw" Oct 01 06:32:32 crc kubenswrapper[4747]: I1001 06:32:32.759636 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw"] Oct 01 06:32:32 crc kubenswrapper[4747]: I1001 06:32:32.856185 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a4150451-d4ba-43d4-a834-f97510776094-bundle\") pod \"2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw\" (UID: \"a4150451-d4ba-43d4-a834-f97510776094\") " pod="openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw" Oct 01 06:32:32 crc kubenswrapper[4747]: I1001 06:32:32.856259 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a4150451-d4ba-43d4-a834-f97510776094-util\") pod \"2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw\" (UID: \"a4150451-d4ba-43d4-a834-f97510776094\") " pod="openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw" Oct 01 06:32:32 crc kubenswrapper[4747]: I1001 06:32:32.856516 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jb42\" (UniqueName: \"kubernetes.io/projected/a4150451-d4ba-43d4-a834-f97510776094-kube-api-access-8jb42\") pod \"2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw\" (UID: \"a4150451-d4ba-43d4-a834-f97510776094\") " pod="openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw" Oct 01 06:32:32 crc kubenswrapper[4747]: E1001 06:32:32.896992 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4a45e1f7_8e3a_4628_95fa_1e0d77b95217.slice/crio-4bfd132dfe116fd5e6d303e227c08ad59e800fcb23a7b47a7b8eaf9ab10296c1.scope\": RecentStats: unable to find data in memory cache]" Oct 01 06:32:32 crc kubenswrapper[4747]: I1001 06:32:32.957620 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a4150451-d4ba-43d4-a834-f97510776094-bundle\") pod \"2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw\" (UID: \"a4150451-d4ba-43d4-a834-f97510776094\") " pod="openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw" Oct 01 06:32:32 crc kubenswrapper[4747]: I1001 06:32:32.957703 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a4150451-d4ba-43d4-a834-f97510776094-util\") pod \"2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw\" (UID: \"a4150451-d4ba-43d4-a834-f97510776094\") " pod="openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw" Oct 01 06:32:32 crc kubenswrapper[4747]: I1001 06:32:32.957824 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jb42\" (UniqueName: \"kubernetes.io/projected/a4150451-d4ba-43d4-a834-f97510776094-kube-api-access-8jb42\") pod \"2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw\" (UID: \"a4150451-d4ba-43d4-a834-f97510776094\") " 
pod="openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw" Oct 01 06:32:32 crc kubenswrapper[4747]: I1001 06:32:32.958444 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a4150451-d4ba-43d4-a834-f97510776094-bundle\") pod \"2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw\" (UID: \"a4150451-d4ba-43d4-a834-f97510776094\") " pod="openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw" Oct 01 06:32:32 crc kubenswrapper[4747]: I1001 06:32:32.958447 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a4150451-d4ba-43d4-a834-f97510776094-util\") pod \"2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw\" (UID: \"a4150451-d4ba-43d4-a834-f97510776094\") " pod="openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw" Oct 01 06:32:32 crc kubenswrapper[4747]: I1001 06:32:32.987468 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jb42\" (UniqueName: \"kubernetes.io/projected/a4150451-d4ba-43d4-a834-f97510776094-kube-api-access-8jb42\") pod \"2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw\" (UID: \"a4150451-d4ba-43d4-a834-f97510776094\") " pod="openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw" Oct 01 06:32:33 crc kubenswrapper[4747]: I1001 06:32:33.075129 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw" Oct 01 06:32:33 crc kubenswrapper[4747]: I1001 06:32:33.365832 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw"] Oct 01 06:32:33 crc kubenswrapper[4747]: I1001 06:32:33.539106 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw" event={"ID":"a4150451-d4ba-43d4-a834-f97510776094","Type":"ContainerStarted","Data":"d247d5e64fe8019e36719a5998838b96af8d47bfc17609ce7d42b5b9190f1241"} Oct 01 06:32:33 crc kubenswrapper[4747]: I1001 06:32:33.539160 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw" event={"ID":"a4150451-d4ba-43d4-a834-f97510776094","Type":"ContainerStarted","Data":"31266020d88d1b9d6d27241b121b7431ee07492fee029761ce97e852038a50a5"} Oct 01 06:32:33 crc kubenswrapper[4747]: I1001 06:32:33.540942 4747 generic.go:334] "Generic (PLEG): container finished" podID="4a45e1f7-8e3a-4628-95fa-1e0d77b95217" containerID="4bfd132dfe116fd5e6d303e227c08ad59e800fcb23a7b47a7b8eaf9ab10296c1" exitCode=0 Oct 01 06:32:33 crc kubenswrapper[4747]: I1001 06:32:33.540992 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v" event={"ID":"4a45e1f7-8e3a-4628-95fa-1e0d77b95217","Type":"ContainerDied","Data":"4bfd132dfe116fd5e6d303e227c08ad59e800fcb23a7b47a7b8eaf9ab10296c1"} Oct 01 06:32:33 crc kubenswrapper[4747]: I1001 06:32:33.541031 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v" 
event={"ID":"4a45e1f7-8e3a-4628-95fa-1e0d77b95217","Type":"ContainerStarted","Data":"21d377b204f6d77686444ccb05061ca4234c50d5dfc30ba8a130f6588659778c"} Oct 01 06:32:34 crc kubenswrapper[4747]: I1001 06:32:34.553469 4747 generic.go:334] "Generic (PLEG): container finished" podID="4a45e1f7-8e3a-4628-95fa-1e0d77b95217" containerID="f4d7d8bac3d39e757acc2b7fa06bb158dcd046856af9854c5f6fed106147836e" exitCode=0 Oct 01 06:32:34 crc kubenswrapper[4747]: I1001 06:32:34.553593 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v" event={"ID":"4a45e1f7-8e3a-4628-95fa-1e0d77b95217","Type":"ContainerDied","Data":"f4d7d8bac3d39e757acc2b7fa06bb158dcd046856af9854c5f6fed106147836e"} Oct 01 06:32:34 crc kubenswrapper[4747]: I1001 06:32:34.556782 4747 generic.go:334] "Generic (PLEG): container finished" podID="274c38e7-1453-412a-9751-fd095df2157b" containerID="6506687389c71fc9bfa0d963c1a142e31e211dc4e1a46ad569fa14e32bc6c970" exitCode=0 Oct 01 06:32:34 crc kubenswrapper[4747]: I1001 06:32:34.556923 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-db-sync-4wbfs" event={"ID":"274c38e7-1453-412a-9751-fd095df2157b","Type":"ContainerDied","Data":"6506687389c71fc9bfa0d963c1a142e31e211dc4e1a46ad569fa14e32bc6c970"} Oct 01 06:32:34 crc kubenswrapper[4747]: I1001 06:32:34.559916 4747 generic.go:334] "Generic (PLEG): container finished" podID="a4150451-d4ba-43d4-a834-f97510776094" containerID="d247d5e64fe8019e36719a5998838b96af8d47bfc17609ce7d42b5b9190f1241" exitCode=0 Oct 01 06:32:34 crc kubenswrapper[4747]: I1001 06:32:34.559981 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw" event={"ID":"a4150451-d4ba-43d4-a834-f97510776094","Type":"ContainerDied","Data":"d247d5e64fe8019e36719a5998838b96af8d47bfc17609ce7d42b5b9190f1241"} Oct 01 06:32:35 crc kubenswrapper[4747]: I1001 06:32:35.572325 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw" event={"ID":"a4150451-d4ba-43d4-a834-f97510776094","Type":"ContainerStarted","Data":"a051bd523e25033d3e391f13617ebd061c815096c74adb4e24ff1f227b4686d1"} Oct 01 06:32:35 crc kubenswrapper[4747]: I1001 06:32:35.578270 4747 generic.go:334] "Generic (PLEG): container finished" podID="4a45e1f7-8e3a-4628-95fa-1e0d77b95217" containerID="621117cd4a467ef66067c235807aca644e1ec9f43009dc5c3f3409b53f35e708" exitCode=0 Oct 01 06:32:35 crc kubenswrapper[4747]: I1001 06:32:35.578695 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v" event={"ID":"4a45e1f7-8e3a-4628-95fa-1e0d77b95217","Type":"ContainerDied","Data":"621117cd4a467ef66067c235807aca644e1ec9f43009dc5c3f3409b53f35e708"} Oct 01 06:32:35 crc kubenswrapper[4747]: I1001 06:32:35.881943 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-db-sync-4wbfs" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.008396 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/274c38e7-1453-412a-9751-fd095df2157b-config-data\") pod \"274c38e7-1453-412a-9751-fd095df2157b\" (UID: \"274c38e7-1453-412a-9751-fd095df2157b\") " Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.008860 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7sj7l\" (UniqueName: \"kubernetes.io/projected/274c38e7-1453-412a-9751-fd095df2157b-kube-api-access-7sj7l\") pod \"274c38e7-1453-412a-9751-fd095df2157b\" (UID: \"274c38e7-1453-412a-9751-fd095df2157b\") " Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.016742 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/274c38e7-1453-412a-9751-fd095df2157b-kube-api-access-7sj7l" (OuterVolumeSpecName: "kube-api-access-7sj7l") pod "274c38e7-1453-412a-9751-fd095df2157b" (UID: "274c38e7-1453-412a-9751-fd095df2157b"). InnerVolumeSpecName "kube-api-access-7sj7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.071830 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/274c38e7-1453-412a-9751-fd095df2157b-config-data" (OuterVolumeSpecName: "config-data") pod "274c38e7-1453-412a-9751-fd095df2157b" (UID: "274c38e7-1453-412a-9751-fd095df2157b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.110895 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/274c38e7-1453-412a-9751-fd095df2157b-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.110940 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7sj7l\" (UniqueName: \"kubernetes.io/projected/274c38e7-1453-412a-9751-fd095df2157b-kube-api-access-7sj7l\") on node \"crc\" DevicePath \"\"" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.590783 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-db-sync-4wbfs" event={"ID":"274c38e7-1453-412a-9751-fd095df2157b","Type":"ContainerDied","Data":"dc9a72ed3df4213438cfc3ae5965d81b037ba117bfd4ea3a4dd0d80c6c990b11"} Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.590840 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc9a72ed3df4213438cfc3ae5965d81b037ba117bfd4ea3a4dd0d80c6c990b11" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.590842 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-db-sync-4wbfs" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.594538 4747 generic.go:334] "Generic (PLEG): container finished" podID="a4150451-d4ba-43d4-a834-f97510776094" containerID="a051bd523e25033d3e391f13617ebd061c815096c74adb4e24ff1f227b4686d1" exitCode=0 Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.594668 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw" event={"ID":"a4150451-d4ba-43d4-a834-f97510776094","Type":"ContainerDied","Data":"a051bd523e25033d3e391f13617ebd061c815096c74adb4e24ff1f227b4686d1"} Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.741270 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/keystone-bootstrap-td5rr"] Oct 01 06:32:36 crc kubenswrapper[4747]: E1001 06:32:36.741537 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="274c38e7-1453-412a-9751-fd095df2157b" containerName="keystone-db-sync" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.741557 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="274c38e7-1453-412a-9751-fd095df2157b" containerName="keystone-db-sync" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.741703 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="274c38e7-1453-412a-9751-fd095df2157b" containerName="keystone-db-sync" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.742218 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-bootstrap-td5rr" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.746020 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-scripts" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.746153 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.746340 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-keystone-dockercfg-qdkq2" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.750400 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-config-data" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.775179 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-bootstrap-td5rr"] Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.821997 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-fernet-keys\") pod \"keystone-bootstrap-td5rr\" (UID: \"b83dbf88-816b-49e0-932b-d31a5317a47d\") " pod="glance-kuttl-tests/keystone-bootstrap-td5rr" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.822126 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-scripts\") pod \"keystone-bootstrap-td5rr\" (UID: \"b83dbf88-816b-49e0-932b-d31a5317a47d\") " pod="glance-kuttl-tests/keystone-bootstrap-td5rr" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.822186 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9pcf\" (UniqueName: 
\"kubernetes.io/projected/b83dbf88-816b-49e0-932b-d31a5317a47d-kube-api-access-n9pcf\") pod \"keystone-bootstrap-td5rr\" (UID: \"b83dbf88-816b-49e0-932b-d31a5317a47d\") " pod="glance-kuttl-tests/keystone-bootstrap-td5rr" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.822224 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-credential-keys\") pod \"keystone-bootstrap-td5rr\" (UID: \"b83dbf88-816b-49e0-932b-d31a5317a47d\") " pod="glance-kuttl-tests/keystone-bootstrap-td5rr" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.822276 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-config-data\") pod \"keystone-bootstrap-td5rr\" (UID: \"b83dbf88-816b-49e0-932b-d31a5317a47d\") " pod="glance-kuttl-tests/keystone-bootstrap-td5rr" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.924894 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-credential-keys\") pod \"keystone-bootstrap-td5rr\" (UID: \"b83dbf88-816b-49e0-932b-d31a5317a47d\") " pod="glance-kuttl-tests/keystone-bootstrap-td5rr" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.925036 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-config-data\") pod \"keystone-bootstrap-td5rr\" (UID: \"b83dbf88-816b-49e0-932b-d31a5317a47d\") " pod="glance-kuttl-tests/keystone-bootstrap-td5rr" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.925091 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-fernet-keys\") pod \"keystone-bootstrap-td5rr\" (UID: \"b83dbf88-816b-49e0-932b-d31a5317a47d\") " pod="glance-kuttl-tests/keystone-bootstrap-td5rr" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.925169 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-scripts\") pod \"keystone-bootstrap-td5rr\" (UID: \"b83dbf88-816b-49e0-932b-d31a5317a47d\") " pod="glance-kuttl-tests/keystone-bootstrap-td5rr" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.925209 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9pcf\" (UniqueName: \"kubernetes.io/projected/b83dbf88-816b-49e0-932b-d31a5317a47d-kube-api-access-n9pcf\") pod \"keystone-bootstrap-td5rr\" (UID: \"b83dbf88-816b-49e0-932b-d31a5317a47d\") " pod="glance-kuttl-tests/keystone-bootstrap-td5rr" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.930092 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-credential-keys\") pod \"keystone-bootstrap-td5rr\" (UID: \"b83dbf88-816b-49e0-932b-d31a5317a47d\") " pod="glance-kuttl-tests/keystone-bootstrap-td5rr" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.930260 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-scripts\") pod \"keystone-bootstrap-td5rr\" (UID: \"b83dbf88-816b-49e0-932b-d31a5317a47d\") " pod="glance-kuttl-tests/keystone-bootstrap-td5rr" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.930344 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-config-data\") pod \"keystone-bootstrap-td5rr\" (UID: \"b83dbf88-816b-49e0-932b-d31a5317a47d\") " pod="glance-kuttl-tests/keystone-bootstrap-td5rr" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.930482 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-fernet-keys\") pod \"keystone-bootstrap-td5rr\" (UID: \"b83dbf88-816b-49e0-932b-d31a5317a47d\") " pod="glance-kuttl-tests/keystone-bootstrap-td5rr" Oct 01 06:32:36 crc kubenswrapper[4747]: I1001 06:32:36.943412 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9pcf\" (UniqueName: \"kubernetes.io/projected/b83dbf88-816b-49e0-932b-d31a5317a47d-kube-api-access-n9pcf\") pod \"keystone-bootstrap-td5rr\" (UID: \"b83dbf88-816b-49e0-932b-d31a5317a47d\") " pod="glance-kuttl-tests/keystone-bootstrap-td5rr" Oct 01 06:32:37 crc kubenswrapper[4747]: I1001 06:32:37.046314 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v" Oct 01 06:32:37 crc kubenswrapper[4747]: I1001 06:32:37.067947 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-bootstrap-td5rr" Oct 01 06:32:37 crc kubenswrapper[4747]: I1001 06:32:37.127178 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a45e1f7-8e3a-4628-95fa-1e0d77b95217-util\") pod \"4a45e1f7-8e3a-4628-95fa-1e0d77b95217\" (UID: \"4a45e1f7-8e3a-4628-95fa-1e0d77b95217\") " Oct 01 06:32:37 crc kubenswrapper[4747]: I1001 06:32:37.127216 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxplg\" (UniqueName: \"kubernetes.io/projected/4a45e1f7-8e3a-4628-95fa-1e0d77b95217-kube-api-access-xxplg\") pod \"4a45e1f7-8e3a-4628-95fa-1e0d77b95217\" (UID: \"4a45e1f7-8e3a-4628-95fa-1e0d77b95217\") " Oct 01 06:32:37 crc kubenswrapper[4747]: I1001 06:32:37.127238 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a45e1f7-8e3a-4628-95fa-1e0d77b95217-bundle\") pod \"4a45e1f7-8e3a-4628-95fa-1e0d77b95217\" (UID: \"4a45e1f7-8e3a-4628-95fa-1e0d77b95217\") " Oct 01 06:32:37 crc kubenswrapper[4747]: I1001 06:32:37.129872 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a45e1f7-8e3a-4628-95fa-1e0d77b95217-bundle" (OuterVolumeSpecName: "bundle") pod "4a45e1f7-8e3a-4628-95fa-1e0d77b95217" (UID: "4a45e1f7-8e3a-4628-95fa-1e0d77b95217"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:32:37 crc kubenswrapper[4747]: I1001 06:32:37.134270 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a45e1f7-8e3a-4628-95fa-1e0d77b95217-kube-api-access-xxplg" (OuterVolumeSpecName: "kube-api-access-xxplg") pod "4a45e1f7-8e3a-4628-95fa-1e0d77b95217" (UID: "4a45e1f7-8e3a-4628-95fa-1e0d77b95217"). InnerVolumeSpecName "kube-api-access-xxplg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:32:37 crc kubenswrapper[4747]: I1001 06:32:37.147425 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a45e1f7-8e3a-4628-95fa-1e0d77b95217-util" (OuterVolumeSpecName: "util") pod "4a45e1f7-8e3a-4628-95fa-1e0d77b95217" (UID: "4a45e1f7-8e3a-4628-95fa-1e0d77b95217"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:32:37 crc kubenswrapper[4747]: I1001 06:32:37.229970 4747 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a45e1f7-8e3a-4628-95fa-1e0d77b95217-util\") on node \"crc\" DevicePath \"\"" Oct 01 06:32:37 crc kubenswrapper[4747]: I1001 06:32:37.230282 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxplg\" (UniqueName: \"kubernetes.io/projected/4a45e1f7-8e3a-4628-95fa-1e0d77b95217-kube-api-access-xxplg\") on node \"crc\" DevicePath \"\"" Oct 01 06:32:37 crc kubenswrapper[4747]: I1001 06:32:37.230306 4747 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a45e1f7-8e3a-4628-95fa-1e0d77b95217-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:32:37 crc kubenswrapper[4747]: I1001 06:32:37.348277 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-bootstrap-td5rr"] Oct 01 06:32:37 crc kubenswrapper[4747]: W1001 06:32:37.358831 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb83dbf88_816b_49e0_932b_d31a5317a47d.slice/crio-00d02bec0fb325d0ae9de2cf0bd1b269e6d7466d9ee3ca7869f5a9c06f814c13 WatchSource:0}: Error finding container 00d02bec0fb325d0ae9de2cf0bd1b269e6d7466d9ee3ca7869f5a9c06f814c13: Status 404 returned error can't find the container with id 00d02bec0fb325d0ae9de2cf0bd1b269e6d7466d9ee3ca7869f5a9c06f814c13 Oct 01 06:32:37 crc kubenswrapper[4747]: I1001 06:32:37.609552 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v" Oct 01 06:32:37 crc kubenswrapper[4747]: I1001 06:32:37.609533 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v" event={"ID":"4a45e1f7-8e3a-4628-95fa-1e0d77b95217","Type":"ContainerDied","Data":"21d377b204f6d77686444ccb05061ca4234c50d5dfc30ba8a130f6588659778c"} Oct 01 06:32:37 crc kubenswrapper[4747]: I1001 06:32:37.609808 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21d377b204f6d77686444ccb05061ca4234c50d5dfc30ba8a130f6588659778c" Oct 01 06:32:37 crc kubenswrapper[4747]: I1001 06:32:37.613954 4747 generic.go:334] "Generic (PLEG): container finished" podID="a4150451-d4ba-43d4-a834-f97510776094" containerID="84e86eea540c918f001ba53a224ea4020904a5fa159020ccb445809eefb660c7" exitCode=0 Oct 01 06:32:37 crc kubenswrapper[4747]: I1001 06:32:37.614013 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw" event={"ID":"a4150451-d4ba-43d4-a834-f97510776094","Type":"ContainerDied","Data":"84e86eea540c918f001ba53a224ea4020904a5fa159020ccb445809eefb660c7"} Oct 01 06:32:37 crc kubenswrapper[4747]: I1001 06:32:37.616884 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-bootstrap-td5rr" event={"ID":"b83dbf88-816b-49e0-932b-d31a5317a47d","Type":"ContainerStarted","Data":"0da78a383ff6673aaa0eee0924b9312236842212a1b616112c38a52115ee1d7b"} Oct 01 06:32:37 crc kubenswrapper[4747]: I1001 06:32:37.616937 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-bootstrap-td5rr" event={"ID":"b83dbf88-816b-49e0-932b-d31a5317a47d","Type":"ContainerStarted","Data":"00d02bec0fb325d0ae9de2cf0bd1b269e6d7466d9ee3ca7869f5a9c06f814c13"} Oct 01 06:32:37 crc kubenswrapper[4747]: I1001 06:32:37.703926 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/keystone-bootstrap-td5rr" podStartSLOduration=1.70390671 podStartE2EDuration="1.70390671s" podCreationTimestamp="2025-10-01 06:32:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:32:37.701298204 +0000 UTC m=+959.110955293" watchObservedRunningTime="2025-10-01 06:32:37.70390671 +0000 UTC m=+959.113563769" Oct 01 06:32:39 crc kubenswrapper[4747]: I1001 06:32:39.019988 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw" Oct 01 06:32:39 crc kubenswrapper[4747]: I1001 06:32:39.162356 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jb42\" (UniqueName: \"kubernetes.io/projected/a4150451-d4ba-43d4-a834-f97510776094-kube-api-access-8jb42\") pod \"a4150451-d4ba-43d4-a834-f97510776094\" (UID: \"a4150451-d4ba-43d4-a834-f97510776094\") " Oct 01 06:32:39 crc kubenswrapper[4747]: I1001 06:32:39.162457 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a4150451-d4ba-43d4-a834-f97510776094-bundle\") pod \"a4150451-d4ba-43d4-a834-f97510776094\" (UID: \"a4150451-d4ba-43d4-a834-f97510776094\") " Oct 01 06:32:39 crc kubenswrapper[4747]: I1001 06:32:39.162665 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a4150451-d4ba-43d4-a834-f97510776094-util\") pod \"a4150451-d4ba-43d4-a834-f97510776094\" (UID: \"a4150451-d4ba-43d4-a834-f97510776094\") " Oct 01 06:32:39 crc kubenswrapper[4747]: I1001 06:32:39.164011 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4150451-d4ba-43d4-a834-f97510776094-bundle" (OuterVolumeSpecName: "bundle") pod "a4150451-d4ba-43d4-a834-f97510776094" (UID: "a4150451-d4ba-43d4-a834-f97510776094"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:32:39 crc kubenswrapper[4747]: I1001 06:32:39.174294 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4150451-d4ba-43d4-a834-f97510776094-kube-api-access-8jb42" (OuterVolumeSpecName: "kube-api-access-8jb42") pod "a4150451-d4ba-43d4-a834-f97510776094" (UID: "a4150451-d4ba-43d4-a834-f97510776094"). InnerVolumeSpecName "kube-api-access-8jb42". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:32:39 crc kubenswrapper[4747]: I1001 06:32:39.185338 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4150451-d4ba-43d4-a834-f97510776094-util" (OuterVolumeSpecName: "util") pod "a4150451-d4ba-43d4-a834-f97510776094" (UID: "a4150451-d4ba-43d4-a834-f97510776094"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:32:39 crc kubenswrapper[4747]: I1001 06:32:39.266024 4747 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a4150451-d4ba-43d4-a834-f97510776094-util\") on node \"crc\" DevicePath \"\"" Oct 01 06:32:39 crc kubenswrapper[4747]: I1001 06:32:39.266074 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jb42\" (UniqueName: \"kubernetes.io/projected/a4150451-d4ba-43d4-a834-f97510776094-kube-api-access-8jb42\") on node \"crc\" DevicePath \"\"" Oct 01 06:32:39 crc kubenswrapper[4747]: I1001 06:32:39.266096 4747 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a4150451-d4ba-43d4-a834-f97510776094-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:32:39 crc kubenswrapper[4747]: I1001 06:32:39.637961 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw" event={"ID":"a4150451-d4ba-43d4-a834-f97510776094","Type":"ContainerDied","Data":"31266020d88d1b9d6d27241b121b7431ee07492fee029761ce97e852038a50a5"} Oct 01 06:32:39 crc kubenswrapper[4747]: I1001 06:32:39.638389 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="31266020d88d1b9d6d27241b121b7431ee07492fee029761ce97e852038a50a5" Oct 01 06:32:39 crc kubenswrapper[4747]: I1001 06:32:39.638057 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw" Oct 01 06:32:40 crc kubenswrapper[4747]: I1001 06:32:40.648948 4747 generic.go:334] "Generic (PLEG): container finished" podID="b83dbf88-816b-49e0-932b-d31a5317a47d" containerID="0da78a383ff6673aaa0eee0924b9312236842212a1b616112c38a52115ee1d7b" exitCode=0 Oct 01 06:32:40 crc kubenswrapper[4747]: I1001 06:32:40.649079 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-bootstrap-td5rr" event={"ID":"b83dbf88-816b-49e0-932b-d31a5317a47d","Type":"ContainerDied","Data":"0da78a383ff6673aaa0eee0924b9312236842212a1b616112c38a52115ee1d7b"} Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.039476 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-bootstrap-td5rr" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.108671 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-fernet-keys\") pod \"b83dbf88-816b-49e0-932b-d31a5317a47d\" (UID: \"b83dbf88-816b-49e0-932b-d31a5317a47d\") " Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.108783 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-scripts\") pod \"b83dbf88-816b-49e0-932b-d31a5317a47d\" (UID: \"b83dbf88-816b-49e0-932b-d31a5317a47d\") " Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.108840 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n9pcf\" (UniqueName: \"kubernetes.io/projected/b83dbf88-816b-49e0-932b-d31a5317a47d-kube-api-access-n9pcf\") pod \"b83dbf88-816b-49e0-932b-d31a5317a47d\" (UID: \"b83dbf88-816b-49e0-932b-d31a5317a47d\") " Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.109915 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-credential-keys\") pod \"b83dbf88-816b-49e0-932b-d31a5317a47d\" (UID: \"b83dbf88-816b-49e0-932b-d31a5317a47d\") " Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.110024 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-config-data\") pod \"b83dbf88-816b-49e0-932b-d31a5317a47d\" (UID: \"b83dbf88-816b-49e0-932b-d31a5317a47d\") " Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.114994 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "b83dbf88-816b-49e0-932b-d31a5317a47d" (UID: "b83dbf88-816b-49e0-932b-d31a5317a47d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.115970 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "b83dbf88-816b-49e0-932b-d31a5317a47d" (UID: "b83dbf88-816b-49e0-932b-d31a5317a47d"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.116306 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-scripts" (OuterVolumeSpecName: "scripts") pod "b83dbf88-816b-49e0-932b-d31a5317a47d" (UID: "b83dbf88-816b-49e0-932b-d31a5317a47d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.116408 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b83dbf88-816b-49e0-932b-d31a5317a47d-kube-api-access-n9pcf" (OuterVolumeSpecName: "kube-api-access-n9pcf") pod "b83dbf88-816b-49e0-932b-d31a5317a47d" (UID: "b83dbf88-816b-49e0-932b-d31a5317a47d"). InnerVolumeSpecName "kube-api-access-n9pcf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.132944 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-config-data" (OuterVolumeSpecName: "config-data") pod "b83dbf88-816b-49e0-932b-d31a5317a47d" (UID: "b83dbf88-816b-49e0-932b-d31a5317a47d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.211671 4747 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.211725 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.211744 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n9pcf\" (UniqueName: \"kubernetes.io/projected/b83dbf88-816b-49e0-932b-d31a5317a47d-kube-api-access-n9pcf\") on node \"crc\" DevicePath \"\"" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.211791 4747 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.211810 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b83dbf88-816b-49e0-932b-d31a5317a47d-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.671140 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-bootstrap-td5rr" event={"ID":"b83dbf88-816b-49e0-932b-d31a5317a47d","Type":"ContainerDied","Data":"00d02bec0fb325d0ae9de2cf0bd1b269e6d7466d9ee3ca7869f5a9c06f814c13"} Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.671209 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="00d02bec0fb325d0ae9de2cf0bd1b269e6d7466d9ee3ca7869f5a9c06f814c13" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.671251 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/keystone-bootstrap-td5rr" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.782471 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/keystone-c7949ccdb-cvdvf"] Oct 01 06:32:42 crc kubenswrapper[4747]: E1001 06:32:42.782906 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4150451-d4ba-43d4-a834-f97510776094" containerName="pull" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.782927 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4150451-d4ba-43d4-a834-f97510776094" containerName="pull" Oct 01 06:32:42 crc kubenswrapper[4747]: E1001 06:32:42.782950 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4150451-d4ba-43d4-a834-f97510776094" containerName="util" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.782963 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4150451-d4ba-43d4-a834-f97510776094" containerName="util" Oct 01 06:32:42 crc kubenswrapper[4747]: E1001 06:32:42.782984 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4150451-d4ba-43d4-a834-f97510776094" containerName="extract" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.782997 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4150451-d4ba-43d4-a834-f97510776094" containerName="extract" Oct 01 06:32:42 crc kubenswrapper[4747]: E1001 06:32:42.783017 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b83dbf88-816b-49e0-932b-d31a5317a47d" containerName="keystone-bootstrap" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.783029 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="b83dbf88-816b-49e0-932b-d31a5317a47d" containerName="keystone-bootstrap" Oct 01 06:32:42 crc kubenswrapper[4747]: E1001 06:32:42.783051 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a45e1f7-8e3a-4628-95fa-1e0d77b95217" containerName="util" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.783063 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a45e1f7-8e3a-4628-95fa-1e0d77b95217" containerName="util" Oct 01 06:32:42 crc kubenswrapper[4747]: E1001 06:32:42.783078 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a45e1f7-8e3a-4628-95fa-1e0d77b95217" containerName="pull" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.783090 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a45e1f7-8e3a-4628-95fa-1e0d77b95217" containerName="pull" Oct 01 06:32:42 crc kubenswrapper[4747]: E1001 06:32:42.783107 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a45e1f7-8e3a-4628-95fa-1e0d77b95217" containerName="extract" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.783120 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a45e1f7-8e3a-4628-95fa-1e0d77b95217" containerName="extract" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.783378 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4150451-d4ba-43d4-a834-f97510776094" containerName="extract" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.783410 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a45e1f7-8e3a-4628-95fa-1e0d77b95217" containerName="extract" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.783427 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="b83dbf88-816b-49e0-932b-d31a5317a47d" containerName="keystone-bootstrap" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 
06:32:42.784082 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.786845 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-keystone-dockercfg-qdkq2" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.787035 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.787053 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-config-data" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.787357 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"keystone-scripts" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.803417 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-c7949ccdb-cvdvf"] Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.922337 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a23fe2de-1da0-4e06-8b11-90c618eb2a15-scripts\") pod \"keystone-c7949ccdb-cvdvf\" (UID: \"a23fe2de-1da0-4e06-8b11-90c618eb2a15\") " pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.922589 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a23fe2de-1da0-4e06-8b11-90c618eb2a15-fernet-keys\") pod \"keystone-c7949ccdb-cvdvf\" (UID: \"a23fe2de-1da0-4e06-8b11-90c618eb2a15\") " pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.922669 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a23fe2de-1da0-4e06-8b11-90c618eb2a15-credential-keys\") pod \"keystone-c7949ccdb-cvdvf\" (UID: \"a23fe2de-1da0-4e06-8b11-90c618eb2a15\") " pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.922717 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a23fe2de-1da0-4e06-8b11-90c618eb2a15-config-data\") pod \"keystone-c7949ccdb-cvdvf\" (UID: \"a23fe2de-1da0-4e06-8b11-90c618eb2a15\") " pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" Oct 01 06:32:42 crc kubenswrapper[4747]: I1001 06:32:42.922889 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fm62g\" (UniqueName: \"kubernetes.io/projected/a23fe2de-1da0-4e06-8b11-90c618eb2a15-kube-api-access-fm62g\") pod \"keystone-c7949ccdb-cvdvf\" (UID: \"a23fe2de-1da0-4e06-8b11-90c618eb2a15\") " pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" Oct 01 06:32:43 crc kubenswrapper[4747]: I1001 06:32:43.024374 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a23fe2de-1da0-4e06-8b11-90c618eb2a15-fernet-keys\") pod \"keystone-c7949ccdb-cvdvf\" (UID: \"a23fe2de-1da0-4e06-8b11-90c618eb2a15\") " pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" Oct 01 06:32:43 crc kubenswrapper[4747]: I1001 06:32:43.024438 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"credential-keys\" (UniqueName: \"kubernetes.io/secret/a23fe2de-1da0-4e06-8b11-90c618eb2a15-credential-keys\") pod \"keystone-c7949ccdb-cvdvf\" (UID: \"a23fe2de-1da0-4e06-8b11-90c618eb2a15\") " pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" Oct 01 06:32:43 crc kubenswrapper[4747]: I1001 06:32:43.024493 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a23fe2de-1da0-4e06-8b11-90c618eb2a15-config-data\") pod \"keystone-c7949ccdb-cvdvf\" (UID: \"a23fe2de-1da0-4e06-8b11-90c618eb2a15\") " pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" Oct 01 06:32:43 crc kubenswrapper[4747]: I1001 06:32:43.024592 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fm62g\" (UniqueName: \"kubernetes.io/projected/a23fe2de-1da0-4e06-8b11-90c618eb2a15-kube-api-access-fm62g\") pod \"keystone-c7949ccdb-cvdvf\" (UID: \"a23fe2de-1da0-4e06-8b11-90c618eb2a15\") " pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" Oct 01 06:32:43 crc kubenswrapper[4747]: I1001 06:32:43.024674 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a23fe2de-1da0-4e06-8b11-90c618eb2a15-scripts\") pod \"keystone-c7949ccdb-cvdvf\" (UID: \"a23fe2de-1da0-4e06-8b11-90c618eb2a15\") " pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" Oct 01 06:32:43 crc kubenswrapper[4747]: I1001 06:32:43.030582 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a23fe2de-1da0-4e06-8b11-90c618eb2a15-scripts\") pod \"keystone-c7949ccdb-cvdvf\" (UID: \"a23fe2de-1da0-4e06-8b11-90c618eb2a15\") " pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" Oct 01 06:32:43 crc kubenswrapper[4747]: I1001 06:32:43.032232 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a23fe2de-1da0-4e06-8b11-90c618eb2a15-fernet-keys\") pod \"keystone-c7949ccdb-cvdvf\" (UID: \"a23fe2de-1da0-4e06-8b11-90c618eb2a15\") " pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" Oct 01 06:32:43 crc kubenswrapper[4747]: I1001 06:32:43.034073 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a23fe2de-1da0-4e06-8b11-90c618eb2a15-credential-keys\") pod \"keystone-c7949ccdb-cvdvf\" (UID: \"a23fe2de-1da0-4e06-8b11-90c618eb2a15\") " pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" Oct 01 06:32:43 crc kubenswrapper[4747]: I1001 06:32:43.036824 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a23fe2de-1da0-4e06-8b11-90c618eb2a15-config-data\") pod \"keystone-c7949ccdb-cvdvf\" (UID: \"a23fe2de-1da0-4e06-8b11-90c618eb2a15\") " pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" Oct 01 06:32:43 crc kubenswrapper[4747]: E1001 06:32:43.056080 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4150451_d4ba_43d4_a834_f97510776094.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4150451_d4ba_43d4_a834_f97510776094.slice/crio-31266020d88d1b9d6d27241b121b7431ee07492fee029761ce97e852038a50a5\": RecentStats: unable to find data in memory cache]" Oct 01 06:32:43 crc kubenswrapper[4747]: I1001 06:32:43.061496 4747 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fm62g\" (UniqueName: \"kubernetes.io/projected/a23fe2de-1da0-4e06-8b11-90c618eb2a15-kube-api-access-fm62g\") pod \"keystone-c7949ccdb-cvdvf\" (UID: \"a23fe2de-1da0-4e06-8b11-90c618eb2a15\") " pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" Oct 01 06:32:43 crc kubenswrapper[4747]: I1001 06:32:43.109772 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" Oct 01 06:32:43 crc kubenswrapper[4747]: I1001 06:32:43.589321 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/keystone-c7949ccdb-cvdvf"] Oct 01 06:32:43 crc kubenswrapper[4747]: W1001 06:32:43.634711 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda23fe2de_1da0_4e06_8b11_90c618eb2a15.slice/crio-3bfd568742e75e217774e3123cbfe71d782cbe1f481eb5764d612c0ebb542d58 WatchSource:0}: Error finding container 3bfd568742e75e217774e3123cbfe71d782cbe1f481eb5764d612c0ebb542d58: Status 404 returned error can't find the container with id 3bfd568742e75e217774e3123cbfe71d782cbe1f481eb5764d612c0ebb542d58 Oct 01 06:32:43 crc kubenswrapper[4747]: I1001 06:32:43.686368 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" event={"ID":"a23fe2de-1da0-4e06-8b11-90c618eb2a15","Type":"ContainerStarted","Data":"3bfd568742e75e217774e3123cbfe71d782cbe1f481eb5764d612c0ebb542d58"} Oct 01 06:32:44 crc kubenswrapper[4747]: I1001 06:32:44.700889 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" event={"ID":"a23fe2de-1da0-4e06-8b11-90c618eb2a15","Type":"ContainerStarted","Data":"16c5c7b073db071bb77e868eccfae2bb04b3bef82147677c5a506f43569abfff"} Oct 01 06:32:44 crc kubenswrapper[4747]: I1001 06:32:44.701372 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" Oct 01 06:32:44 crc kubenswrapper[4747]: I1001 06:32:44.733712 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" podStartSLOduration=2.733684922 podStartE2EDuration="2.733684922s" podCreationTimestamp="2025-10-01 06:32:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:32:44.722118446 +0000 UTC m=+966.131775555" watchObservedRunningTime="2025-10-01 06:32:44.733684922 +0000 UTC m=+966.143342001" Oct 01 06:32:50 crc kubenswrapper[4747]: I1001 06:32:50.374313 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6d8f97bbbb-pmdmc"] Oct 01 06:32:50 crc kubenswrapper[4747]: I1001 06:32:50.376191 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6d8f97bbbb-pmdmc" Oct 01 06:32:50 crc kubenswrapper[4747]: W1001 06:32:50.379448 4747 reflector.go:561] object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-zwh7m": failed to list *v1.Secret: secrets "horizon-operator-controller-manager-dockercfg-zwh7m" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack-operators": no relationship found between node 'crc' and this object Oct 01 06:32:50 crc kubenswrapper[4747]: E1001 06:32:50.379498 4747 reflector.go:158] "Unhandled Error" err="object-\"openstack-operators\"/\"horizon-operator-controller-manager-dockercfg-zwh7m\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"horizon-operator-controller-manager-dockercfg-zwh7m\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 01 06:32:50 crc kubenswrapper[4747]: I1001 06:32:50.379628 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-service-cert" Oct 01 06:32:50 crc kubenswrapper[4747]: I1001 06:32:50.403219 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6d8f97bbbb-pmdmc"] Oct 01 06:32:50 crc kubenswrapper[4747]: I1001 06:32:50.539001 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6z6t\" (UniqueName: \"kubernetes.io/projected/cc97678f-a010-49c0-bc9c-a46288467178-kube-api-access-l6z6t\") pod \"horizon-operator-controller-manager-6d8f97bbbb-pmdmc\" (UID: \"cc97678f-a010-49c0-bc9c-a46288467178\") " pod="openstack-operators/horizon-operator-controller-manager-6d8f97bbbb-pmdmc" Oct 01 06:32:50 crc kubenswrapper[4747]: I1001 06:32:50.539054 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cc97678f-a010-49c0-bc9c-a46288467178-webhook-cert\") pod \"horizon-operator-controller-manager-6d8f97bbbb-pmdmc\" (UID: \"cc97678f-a010-49c0-bc9c-a46288467178\") " pod="openstack-operators/horizon-operator-controller-manager-6d8f97bbbb-pmdmc" Oct 01 06:32:50 crc kubenswrapper[4747]: I1001 06:32:50.539082 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cc97678f-a010-49c0-bc9c-a46288467178-apiservice-cert\") pod \"horizon-operator-controller-manager-6d8f97bbbb-pmdmc\" (UID: \"cc97678f-a010-49c0-bc9c-a46288467178\") " pod="openstack-operators/horizon-operator-controller-manager-6d8f97bbbb-pmdmc" Oct 01 06:32:50 crc kubenswrapper[4747]: I1001 06:32:50.641110 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6z6t\" (UniqueName: \"kubernetes.io/projected/cc97678f-a010-49c0-bc9c-a46288467178-kube-api-access-l6z6t\") pod \"horizon-operator-controller-manager-6d8f97bbbb-pmdmc\" (UID: \"cc97678f-a010-49c0-bc9c-a46288467178\") " pod="openstack-operators/horizon-operator-controller-manager-6d8f97bbbb-pmdmc" Oct 01 06:32:50 crc kubenswrapper[4747]: I1001 06:32:50.641162 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/cc97678f-a010-49c0-bc9c-a46288467178-webhook-cert\") pod \"horizon-operator-controller-manager-6d8f97bbbb-pmdmc\" (UID: \"cc97678f-a010-49c0-bc9c-a46288467178\") " pod="openstack-operators/horizon-operator-controller-manager-6d8f97bbbb-pmdmc" Oct 01 06:32:50 crc kubenswrapper[4747]: I1001 06:32:50.641187 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cc97678f-a010-49c0-bc9c-a46288467178-apiservice-cert\") pod \"horizon-operator-controller-manager-6d8f97bbbb-pmdmc\" (UID: \"cc97678f-a010-49c0-bc9c-a46288467178\") " pod="openstack-operators/horizon-operator-controller-manager-6d8f97bbbb-pmdmc" Oct 01 06:32:50 crc kubenswrapper[4747]: I1001 06:32:50.647357 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cc97678f-a010-49c0-bc9c-a46288467178-webhook-cert\") pod \"horizon-operator-controller-manager-6d8f97bbbb-pmdmc\" (UID: \"cc97678f-a010-49c0-bc9c-a46288467178\") " pod="openstack-operators/horizon-operator-controller-manager-6d8f97bbbb-pmdmc" Oct 01 06:32:50 crc kubenswrapper[4747]: I1001 06:32:50.647840 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cc97678f-a010-49c0-bc9c-a46288467178-apiservice-cert\") pod \"horizon-operator-controller-manager-6d8f97bbbb-pmdmc\" (UID: \"cc97678f-a010-49c0-bc9c-a46288467178\") " pod="openstack-operators/horizon-operator-controller-manager-6d8f97bbbb-pmdmc" Oct 01 06:32:50 crc kubenswrapper[4747]: I1001 06:32:50.662906 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6z6t\" (UniqueName: \"kubernetes.io/projected/cc97678f-a010-49c0-bc9c-a46288467178-kube-api-access-l6z6t\") pod \"horizon-operator-controller-manager-6d8f97bbbb-pmdmc\" (UID: \"cc97678f-a010-49c0-bc9c-a46288467178\") " pod="openstack-operators/horizon-operator-controller-manager-6d8f97bbbb-pmdmc" Oct 01 06:32:51 crc kubenswrapper[4747]: I1001 06:32:51.695838 4747 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-operators/horizon-operator-controller-manager-6d8f97bbbb-pmdmc" secret="" err="failed to sync secret cache: timed out waiting for the condition" Oct 01 06:32:51 crc kubenswrapper[4747]: I1001 06:32:51.696214 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6d8f97bbbb-pmdmc" Oct 01 06:32:51 crc kubenswrapper[4747]: I1001 06:32:51.801658 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-zwh7m" Oct 01 06:32:51 crc kubenswrapper[4747]: I1001 06:32:51.970523 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6d8f97bbbb-pmdmc"] Oct 01 06:32:52 crc kubenswrapper[4747]: I1001 06:32:52.781633 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6d8f97bbbb-pmdmc" event={"ID":"cc97678f-a010-49c0-bc9c-a46288467178","Type":"ContainerStarted","Data":"f0d090934954493007c5917d4a5ff27d7007b9b5aa0d9d8b158eac9c855922af"} Oct 01 06:32:53 crc kubenswrapper[4747]: E1001 06:32:53.205824 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4150451_d4ba_43d4_a834_f97510776094.slice/crio-31266020d88d1b9d6d27241b121b7431ee07492fee029761ce97e852038a50a5\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4150451_d4ba_43d4_a834_f97510776094.slice\": RecentStats: unable to find data in memory cache]" Oct 01 06:32:54 crc kubenswrapper[4747]: I1001 06:32:54.796692 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6d8f97bbbb-pmdmc" event={"ID":"cc97678f-a010-49c0-bc9c-a46288467178","Type":"ContainerStarted","Data":"45ceae5bff6701a4f3b2cea6e88bf9928ec8352f4a0602068647889d26ce00dc"} Oct 01 06:32:54 crc kubenswrapper[4747]: I1001 06:32:54.797010 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6d8f97bbbb-pmdmc" event={"ID":"cc97678f-a010-49c0-bc9c-a46288467178","Type":"ContainerStarted","Data":"a3300c4771c60f064de95cd0c8c05b619b0ccbcca9144786212e97928445f1ed"} Oct 01 06:32:54 crc kubenswrapper[4747]: I1001 06:32:54.797079 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-6d8f97bbbb-pmdmc" Oct 01 06:32:54 crc kubenswrapper[4747]: I1001 06:32:54.825197 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-6d8f97bbbb-pmdmc" podStartSLOduration=3.020392531 podStartE2EDuration="4.825178905s" podCreationTimestamp="2025-10-01 06:32:50 +0000 UTC" firstStartedPulling="2025-10-01 06:32:51.964788074 +0000 UTC m=+973.374445133" lastFinishedPulling="2025-10-01 06:32:53.769574458 +0000 UTC m=+975.179231507" observedRunningTime="2025-10-01 06:32:54.819803039 +0000 UTC m=+976.229460108" watchObservedRunningTime="2025-10-01 06:32:54.825178905 +0000 UTC m=+976.234835954" Oct 01 06:32:56 crc kubenswrapper[4747]: I1001 06:32:56.741183 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-64d9946955-5jftj"] Oct 01 06:32:56 crc kubenswrapper[4747]: I1001 06:32:56.742492 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-64d9946955-5jftj" Oct 01 06:32:56 crc kubenswrapper[4747]: I1001 06:32:56.750651 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-service-cert" Oct 01 06:32:56 crc kubenswrapper[4747]: I1001 06:32:56.750951 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-dlzrj" Oct 01 06:32:56 crc kubenswrapper[4747]: I1001 06:32:56.783891 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-64d9946955-5jftj"] Oct 01 06:32:56 crc kubenswrapper[4747]: I1001 06:32:56.827646 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6179950f-9f69-4c73-8400-b9651eabe647-apiservice-cert\") pod \"swift-operator-controller-manager-64d9946955-5jftj\" (UID: \"6179950f-9f69-4c73-8400-b9651eabe647\") " pod="openstack-operators/swift-operator-controller-manager-64d9946955-5jftj" Oct 01 06:32:56 crc kubenswrapper[4747]: I1001 06:32:56.827702 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hj7b9\" (UniqueName: \"kubernetes.io/projected/6179950f-9f69-4c73-8400-b9651eabe647-kube-api-access-hj7b9\") pod \"swift-operator-controller-manager-64d9946955-5jftj\" (UID: \"6179950f-9f69-4c73-8400-b9651eabe647\") " pod="openstack-operators/swift-operator-controller-manager-64d9946955-5jftj" Oct 01 06:32:56 crc kubenswrapper[4747]: I1001 06:32:56.827864 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6179950f-9f69-4c73-8400-b9651eabe647-webhook-cert\") pod \"swift-operator-controller-manager-64d9946955-5jftj\" (UID: \"6179950f-9f69-4c73-8400-b9651eabe647\") " pod="openstack-operators/swift-operator-controller-manager-64d9946955-5jftj" Oct 01 06:32:56 crc kubenswrapper[4747]: I1001 06:32:56.928920 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6179950f-9f69-4c73-8400-b9651eabe647-apiservice-cert\") pod \"swift-operator-controller-manager-64d9946955-5jftj\" (UID: \"6179950f-9f69-4c73-8400-b9651eabe647\") " pod="openstack-operators/swift-operator-controller-manager-64d9946955-5jftj" Oct 01 06:32:56 crc kubenswrapper[4747]: I1001 06:32:56.928979 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hj7b9\" (UniqueName: \"kubernetes.io/projected/6179950f-9f69-4c73-8400-b9651eabe647-kube-api-access-hj7b9\") pod \"swift-operator-controller-manager-64d9946955-5jftj\" (UID: \"6179950f-9f69-4c73-8400-b9651eabe647\") " pod="openstack-operators/swift-operator-controller-manager-64d9946955-5jftj" Oct 01 06:32:56 crc kubenswrapper[4747]: I1001 06:32:56.929025 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6179950f-9f69-4c73-8400-b9651eabe647-webhook-cert\") pod \"swift-operator-controller-manager-64d9946955-5jftj\" (UID: \"6179950f-9f69-4c73-8400-b9651eabe647\") " pod="openstack-operators/swift-operator-controller-manager-64d9946955-5jftj" Oct 01 06:32:56 crc kubenswrapper[4747]: I1001 06:32:56.935429 4747 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6179950f-9f69-4c73-8400-b9651eabe647-webhook-cert\") pod \"swift-operator-controller-manager-64d9946955-5jftj\" (UID: \"6179950f-9f69-4c73-8400-b9651eabe647\") " pod="openstack-operators/swift-operator-controller-manager-64d9946955-5jftj" Oct 01 06:32:56 crc kubenswrapper[4747]: I1001 06:32:56.940251 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6179950f-9f69-4c73-8400-b9651eabe647-apiservice-cert\") pod \"swift-operator-controller-manager-64d9946955-5jftj\" (UID: \"6179950f-9f69-4c73-8400-b9651eabe647\") " pod="openstack-operators/swift-operator-controller-manager-64d9946955-5jftj" Oct 01 06:32:56 crc kubenswrapper[4747]: I1001 06:32:56.957712 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hj7b9\" (UniqueName: \"kubernetes.io/projected/6179950f-9f69-4c73-8400-b9651eabe647-kube-api-access-hj7b9\") pod \"swift-operator-controller-manager-64d9946955-5jftj\" (UID: \"6179950f-9f69-4c73-8400-b9651eabe647\") " pod="openstack-operators/swift-operator-controller-manager-64d9946955-5jftj" Oct 01 06:32:57 crc kubenswrapper[4747]: I1001 06:32:57.066047 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-64d9946955-5jftj" Oct 01 06:32:57 crc kubenswrapper[4747]: I1001 06:32:57.559440 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-64d9946955-5jftj"] Oct 01 06:32:57 crc kubenswrapper[4747]: W1001 06:32:57.567915 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6179950f_9f69_4c73_8400_b9651eabe647.slice/crio-18005676cd45d51eb3f089437b0d5ca0ab4dbd2001f8d63ecd1087e2a4ae6514 WatchSource:0}: Error finding container 18005676cd45d51eb3f089437b0d5ca0ab4dbd2001f8d63ecd1087e2a4ae6514: Status 404 returned error can't find the container with id 18005676cd45d51eb3f089437b0d5ca0ab4dbd2001f8d63ecd1087e2a4ae6514 Oct 01 06:32:57 crc kubenswrapper[4747]: I1001 06:32:57.825300 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-64d9946955-5jftj" event={"ID":"6179950f-9f69-4c73-8400-b9651eabe647","Type":"ContainerStarted","Data":"18005676cd45d51eb3f089437b0d5ca0ab4dbd2001f8d63ecd1087e2a4ae6514"} Oct 01 06:33:00 crc kubenswrapper[4747]: I1001 06:33:00.857059 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-64d9946955-5jftj" event={"ID":"6179950f-9f69-4c73-8400-b9651eabe647","Type":"ContainerStarted","Data":"80136bd65f8573535553409e9625f094883f1e3fbedb1657531f5cf3fe769411"} Oct 01 06:33:00 crc kubenswrapper[4747]: I1001 06:33:00.857744 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-64d9946955-5jftj" Oct 01 06:33:00 crc kubenswrapper[4747]: I1001 06:33:00.857791 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-64d9946955-5jftj" event={"ID":"6179950f-9f69-4c73-8400-b9651eabe647","Type":"ContainerStarted","Data":"599aa87f9d9b7ac2bc8a1235cb14d5254c6f0cea2fb6ec0790297f83e4d5b13c"} Oct 01 06:33:00 crc kubenswrapper[4747]: I1001 06:33:00.878857 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/swift-operator-controller-manager-64d9946955-5jftj" podStartSLOduration=2.567952197 podStartE2EDuration="4.878841163s" podCreationTimestamp="2025-10-01 06:32:56 +0000 UTC" firstStartedPulling="2025-10-01 06:32:57.57048601 +0000 UTC m=+978.980143069" lastFinishedPulling="2025-10-01 06:32:59.881374976 +0000 UTC m=+981.291032035" observedRunningTime="2025-10-01 06:33:00.875156651 +0000 UTC m=+982.284813740" watchObservedRunningTime="2025-10-01 06:33:00.878841163 +0000 UTC m=+982.288498212" Oct 01 06:33:01 crc kubenswrapper[4747]: I1001 06:33:01.704578 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-6d8f97bbbb-pmdmc" Oct 01 06:33:03 crc kubenswrapper[4747]: E1001 06:33:03.433206 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4150451_d4ba_43d4_a834_f97510776094.slice/crio-31266020d88d1b9d6d27241b121b7431ee07492fee029761ce97e852038a50a5\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4150451_d4ba_43d4_a834_f97510776094.slice\": RecentStats: unable to find data in memory cache]" Oct 01 06:33:05 crc kubenswrapper[4747]: I1001 06:33:05.761430 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:33:05 crc kubenswrapper[4747]: I1001 06:33:05.761769 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:33:07 crc kubenswrapper[4747]: I1001 06:33:07.071698 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-64d9946955-5jftj" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.356586 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/swift-storage-0"] Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.371898 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.373839 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"swift-swift-dockercfg-rg5g2" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.374426 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"swift-storage-config-data" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.375058 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"swift-ring-files" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.377247 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"swift-conf" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.401374 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/swift-storage-0"] Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.511128 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/55fda102-0ab8-430c-b4b9-2ca87772c44d-etc-swift\") pod \"swift-storage-0\" (UID: \"55fda102-0ab8-430c-b4b9-2ca87772c44d\") " pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.511223 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbxsw\" (UniqueName: \"kubernetes.io/projected/55fda102-0ab8-430c-b4b9-2ca87772c44d-kube-api-access-nbxsw\") pod \"swift-storage-0\" (UID: \"55fda102-0ab8-430c-b4b9-2ca87772c44d\") " pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.511303 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"55fda102-0ab8-430c-b4b9-2ca87772c44d\") " pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.511386 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/55fda102-0ab8-430c-b4b9-2ca87772c44d-lock\") pod \"swift-storage-0\" (UID: \"55fda102-0ab8-430c-b4b9-2ca87772c44d\") " pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.511430 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/55fda102-0ab8-430c-b4b9-2ca87772c44d-cache\") pod \"swift-storage-0\" (UID: \"55fda102-0ab8-430c-b4b9-2ca87772c44d\") " pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.613104 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"55fda102-0ab8-430c-b4b9-2ca87772c44d\") " pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.613175 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/55fda102-0ab8-430c-b4b9-2ca87772c44d-lock\") pod \"swift-storage-0\" (UID: \"55fda102-0ab8-430c-b4b9-2ca87772c44d\") " pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:09 crc 
kubenswrapper[4747]: I1001 06:33:09.613201 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/55fda102-0ab8-430c-b4b9-2ca87772c44d-cache\") pod \"swift-storage-0\" (UID: \"55fda102-0ab8-430c-b4b9-2ca87772c44d\") " pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.613237 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/55fda102-0ab8-430c-b4b9-2ca87772c44d-etc-swift\") pod \"swift-storage-0\" (UID: \"55fda102-0ab8-430c-b4b9-2ca87772c44d\") " pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.613268 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbxsw\" (UniqueName: \"kubernetes.io/projected/55fda102-0ab8-430c-b4b9-2ca87772c44d-kube-api-access-nbxsw\") pod \"swift-storage-0\" (UID: \"55fda102-0ab8-430c-b4b9-2ca87772c44d\") " pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:09 crc kubenswrapper[4747]: E1001 06:33:09.613483 4747 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.613506 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"55fda102-0ab8-430c-b4b9-2ca87772c44d\") device mount path \"/mnt/openstack/pv06\"" pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:09 crc kubenswrapper[4747]: E1001 06:33:09.613526 4747 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Oct 01 06:33:09 crc kubenswrapper[4747]: E1001 06:33:09.613627 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/55fda102-0ab8-430c-b4b9-2ca87772c44d-etc-swift podName:55fda102-0ab8-430c-b4b9-2ca87772c44d nodeName:}" failed. No retries permitted until 2025-10-01 06:33:10.113611987 +0000 UTC m=+991.523269036 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/55fda102-0ab8-430c-b4b9-2ca87772c44d-etc-swift") pod "swift-storage-0" (UID: "55fda102-0ab8-430c-b4b9-2ca87772c44d") : configmap "swift-ring-files" not found Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.613939 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/55fda102-0ab8-430c-b4b9-2ca87772c44d-cache\") pod \"swift-storage-0\" (UID: \"55fda102-0ab8-430c-b4b9-2ca87772c44d\") " pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.614273 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/55fda102-0ab8-430c-b4b9-2ca87772c44d-lock\") pod \"swift-storage-0\" (UID: \"55fda102-0ab8-430c-b4b9-2ca87772c44d\") " pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.637248 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbxsw\" (UniqueName: \"kubernetes.io/projected/55fda102-0ab8-430c-b4b9-2ca87772c44d-kube-api-access-nbxsw\") pod \"swift-storage-0\" (UID: \"55fda102-0ab8-430c-b4b9-2ca87772c44d\") " pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.644237 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"55fda102-0ab8-430c-b4b9-2ca87772c44d\") " pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.744715 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/swift-ring-rebalance-qtqmv"] Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.745475 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.747255 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"swift-proxy-config-data" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.747473 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"swift-ring-config-data" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.748374 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"swift-ring-scripts" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.771271 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/swift-ring-rebalance-qtqmv"] Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.815267 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/928ccacf-68c1-4861-9804-320bcdc66f93-etc-swift\") pod \"swift-ring-rebalance-qtqmv\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.815337 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/928ccacf-68c1-4861-9804-320bcdc66f93-dispersionconf\") pod \"swift-ring-rebalance-qtqmv\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.815364 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/928ccacf-68c1-4861-9804-320bcdc66f93-swiftconf\") pod \"swift-ring-rebalance-qtqmv\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.815384 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/928ccacf-68c1-4861-9804-320bcdc66f93-scripts\") pod \"swift-ring-rebalance-qtqmv\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.815500 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/928ccacf-68c1-4861-9804-320bcdc66f93-ring-data-devices\") pod \"swift-ring-rebalance-qtqmv\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.815637 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2rgc\" (UniqueName: \"kubernetes.io/projected/928ccacf-68c1-4861-9804-320bcdc66f93-kube-api-access-r2rgc\") pod \"swift-ring-rebalance-qtqmv\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.916737 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/928ccacf-68c1-4861-9804-320bcdc66f93-etc-swift\") pod \"swift-ring-rebalance-qtqmv\" (UID: 
\"928ccacf-68c1-4861-9804-320bcdc66f93\") " pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.917258 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/928ccacf-68c1-4861-9804-320bcdc66f93-dispersionconf\") pod \"swift-ring-rebalance-qtqmv\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.917456 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/928ccacf-68c1-4861-9804-320bcdc66f93-swiftconf\") pod \"swift-ring-rebalance-qtqmv\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.917507 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/928ccacf-68c1-4861-9804-320bcdc66f93-etc-swift\") pod \"swift-ring-rebalance-qtqmv\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.917619 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/928ccacf-68c1-4861-9804-320bcdc66f93-scripts\") pod \"swift-ring-rebalance-qtqmv\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.917947 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/928ccacf-68c1-4861-9804-320bcdc66f93-ring-data-devices\") pod \"swift-ring-rebalance-qtqmv\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.918140 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2rgc\" (UniqueName: \"kubernetes.io/projected/928ccacf-68c1-4861-9804-320bcdc66f93-kube-api-access-r2rgc\") pod \"swift-ring-rebalance-qtqmv\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.918861 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/928ccacf-68c1-4861-9804-320bcdc66f93-scripts\") pod \"swift-ring-rebalance-qtqmv\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.919089 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/928ccacf-68c1-4861-9804-320bcdc66f93-ring-data-devices\") pod \"swift-ring-rebalance-qtqmv\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.922737 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/928ccacf-68c1-4861-9804-320bcdc66f93-dispersionconf\") pod \"swift-ring-rebalance-qtqmv\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " 
pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.932495 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/928ccacf-68c1-4861-9804-320bcdc66f93-swiftconf\") pod \"swift-ring-rebalance-qtqmv\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:09 crc kubenswrapper[4747]: I1001 06:33:09.946672 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2rgc\" (UniqueName: \"kubernetes.io/projected/928ccacf-68c1-4861-9804-320bcdc66f93-kube-api-access-r2rgc\") pod \"swift-ring-rebalance-qtqmv\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:10 crc kubenswrapper[4747]: I1001 06:33:10.059129 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:10 crc kubenswrapper[4747]: I1001 06:33:10.121534 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/55fda102-0ab8-430c-b4b9-2ca87772c44d-etc-swift\") pod \"swift-storage-0\" (UID: \"55fda102-0ab8-430c-b4b9-2ca87772c44d\") " pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:10 crc kubenswrapper[4747]: E1001 06:33:10.121880 4747 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Oct 01 06:33:10 crc kubenswrapper[4747]: E1001 06:33:10.121922 4747 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Oct 01 06:33:10 crc kubenswrapper[4747]: E1001 06:33:10.122000 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/55fda102-0ab8-430c-b4b9-2ca87772c44d-etc-swift podName:55fda102-0ab8-430c-b4b9-2ca87772c44d nodeName:}" failed. No retries permitted until 2025-10-01 06:33:11.121974654 +0000 UTC m=+992.531631723 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/55fda102-0ab8-430c-b4b9-2ca87772c44d-etc-swift") pod "swift-storage-0" (UID: "55fda102-0ab8-430c-b4b9-2ca87772c44d") : configmap "swift-ring-files" not found Oct 01 06:33:10 crc kubenswrapper[4747]: I1001 06:33:10.337873 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/swift-ring-rebalance-qtqmv"] Oct 01 06:33:10 crc kubenswrapper[4747]: W1001 06:33:10.352961 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod928ccacf_68c1_4861_9804_320bcdc66f93.slice/crio-984cce4154290659bf31ce39ce5b7f815833f61ee2fd52a280bb72eb0ff01f23 WatchSource:0}: Error finding container 984cce4154290659bf31ce39ce5b7f815833f61ee2fd52a280bb72eb0ff01f23: Status 404 returned error can't find the container with id 984cce4154290659bf31ce39ce5b7f815833f61ee2fd52a280bb72eb0ff01f23 Oct 01 06:33:10 crc kubenswrapper[4747]: I1001 06:33:10.950905 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" event={"ID":"928ccacf-68c1-4861-9804-320bcdc66f93","Type":"ContainerStarted","Data":"984cce4154290659bf31ce39ce5b7f815833f61ee2fd52a280bb72eb0ff01f23"} Oct 01 06:33:11 crc kubenswrapper[4747]: I1001 06:33:11.139541 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/55fda102-0ab8-430c-b4b9-2ca87772c44d-etc-swift\") pod \"swift-storage-0\" (UID: \"55fda102-0ab8-430c-b4b9-2ca87772c44d\") " pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:11 crc kubenswrapper[4747]: E1001 06:33:11.139983 4747 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Oct 01 06:33:11 crc kubenswrapper[4747]: E1001 06:33:11.140099 4747 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Oct 01 06:33:11 crc kubenswrapper[4747]: E1001 06:33:11.140161 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/55fda102-0ab8-430c-b4b9-2ca87772c44d-etc-swift podName:55fda102-0ab8-430c-b4b9-2ca87772c44d nodeName:}" failed. No retries permitted until 2025-10-01 06:33:13.140142146 +0000 UTC m=+994.549799195 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/55fda102-0ab8-430c-b4b9-2ca87772c44d-etc-swift") pod "swift-storage-0" (UID: "55fda102-0ab8-430c-b4b9-2ca87772c44d") : configmap "swift-ring-files" not found Oct 01 06:33:13 crc kubenswrapper[4747]: I1001 06:33:13.173846 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/55fda102-0ab8-430c-b4b9-2ca87772c44d-etc-swift\") pod \"swift-storage-0\" (UID: \"55fda102-0ab8-430c-b4b9-2ca87772c44d\") " pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:13 crc kubenswrapper[4747]: E1001 06:33:13.174024 4747 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Oct 01 06:33:13 crc kubenswrapper[4747]: E1001 06:33:13.174204 4747 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Oct 01 06:33:13 crc kubenswrapper[4747]: E1001 06:33:13.174254 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/55fda102-0ab8-430c-b4b9-2ca87772c44d-etc-swift podName:55fda102-0ab8-430c-b4b9-2ca87772c44d nodeName:}" failed. No retries permitted until 2025-10-01 06:33:17.174239716 +0000 UTC m=+998.583896765 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/55fda102-0ab8-430c-b4b9-2ca87772c44d-etc-swift") pod "swift-storage-0" (UID: "55fda102-0ab8-430c-b4b9-2ca87772c44d") : configmap "swift-ring-files" not found Oct 01 06:33:13 crc kubenswrapper[4747]: E1001 06:33:13.588055 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4150451_d4ba_43d4_a834_f97510776094.slice/crio-31266020d88d1b9d6d27241b121b7431ee07492fee029761ce97e852038a50a5\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4150451_d4ba_43d4_a834_f97510776094.slice\": RecentStats: unable to find data in memory cache]" Oct 01 06:33:13 crc kubenswrapper[4747]: I1001 06:33:13.972903 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" event={"ID":"928ccacf-68c1-4861-9804-320bcdc66f93","Type":"ContainerStarted","Data":"ae24c79f8f36aacd0514266ab198b06b35a6945d13ee14c085e034779fa468b2"} Oct 01 06:33:13 crc kubenswrapper[4747]: I1001 06:33:13.998457 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" podStartSLOduration=1.687239216 podStartE2EDuration="4.998434045s" podCreationTimestamp="2025-10-01 06:33:09 +0000 UTC" firstStartedPulling="2025-10-01 06:33:10.36253032 +0000 UTC m=+991.772187389" lastFinishedPulling="2025-10-01 06:33:13.673725169 +0000 UTC m=+995.083382218" observedRunningTime="2025-10-01 06:33:13.992502859 +0000 UTC m=+995.402159918" watchObservedRunningTime="2025-10-01 06:33:13.998434045 +0000 UTC m=+995.408091124" Oct 01 06:33:14 crc kubenswrapper[4747]: I1001 06:33:14.515922 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/keystone-c7949ccdb-cvdvf" Oct 01 06:33:14 crc kubenswrapper[4747]: I1001 06:33:14.760814 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-index-lg8t8"] Oct 01 06:33:14 crc 
kubenswrapper[4747]: I1001 06:33:14.764649 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-index-lg8t8" Oct 01 06:33:14 crc kubenswrapper[4747]: I1001 06:33:14.768094 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-index-dockercfg-gkmnq" Oct 01 06:33:14 crc kubenswrapper[4747]: I1001 06:33:14.771487 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-index-lg8t8"] Oct 01 06:33:14 crc kubenswrapper[4747]: I1001 06:33:14.900582 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8d5gw\" (UniqueName: \"kubernetes.io/projected/5c4d75c0-6aea-4ef6-aba7-8bc84df2ba8f-kube-api-access-8d5gw\") pod \"glance-operator-index-lg8t8\" (UID: \"5c4d75c0-6aea-4ef6-aba7-8bc84df2ba8f\") " pod="openstack-operators/glance-operator-index-lg8t8" Oct 01 06:33:15 crc kubenswrapper[4747]: I1001 06:33:15.001772 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8d5gw\" (UniqueName: \"kubernetes.io/projected/5c4d75c0-6aea-4ef6-aba7-8bc84df2ba8f-kube-api-access-8d5gw\") pod \"glance-operator-index-lg8t8\" (UID: \"5c4d75c0-6aea-4ef6-aba7-8bc84df2ba8f\") " pod="openstack-operators/glance-operator-index-lg8t8" Oct 01 06:33:15 crc kubenswrapper[4747]: I1001 06:33:15.032428 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8d5gw\" (UniqueName: \"kubernetes.io/projected/5c4d75c0-6aea-4ef6-aba7-8bc84df2ba8f-kube-api-access-8d5gw\") pod \"glance-operator-index-lg8t8\" (UID: \"5c4d75c0-6aea-4ef6-aba7-8bc84df2ba8f\") " pod="openstack-operators/glance-operator-index-lg8t8" Oct 01 06:33:15 crc kubenswrapper[4747]: I1001 06:33:15.096155 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-index-lg8t8" Oct 01 06:33:15 crc kubenswrapper[4747]: I1001 06:33:15.406626 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-index-lg8t8"] Oct 01 06:33:15 crc kubenswrapper[4747]: I1001 06:33:15.995134 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-index-lg8t8" event={"ID":"5c4d75c0-6aea-4ef6-aba7-8bc84df2ba8f","Type":"ContainerStarted","Data":"15ac78464a39bfca6078fc6ffd9511f8f3b21ae4c981163c5c8bc9b2d98f87ef"} Oct 01 06:33:17 crc kubenswrapper[4747]: I1001 06:33:17.236486 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/55fda102-0ab8-430c-b4b9-2ca87772c44d-etc-swift\") pod \"swift-storage-0\" (UID: \"55fda102-0ab8-430c-b4b9-2ca87772c44d\") " pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:17 crc kubenswrapper[4747]: E1001 06:33:17.236673 4747 projected.go:288] Couldn't get configMap glance-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Oct 01 06:33:17 crc kubenswrapper[4747]: E1001 06:33:17.236958 4747 projected.go:194] Error preparing data for projected volume etc-swift for pod glance-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Oct 01 06:33:17 crc kubenswrapper[4747]: E1001 06:33:17.237025 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/55fda102-0ab8-430c-b4b9-2ca87772c44d-etc-swift podName:55fda102-0ab8-430c-b4b9-2ca87772c44d nodeName:}" failed. 
No retries permitted until 2025-10-01 06:33:25.237001141 +0000 UTC m=+1006.646658190 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/55fda102-0ab8-430c-b4b9-2ca87772c44d-etc-swift") pod "swift-storage-0" (UID: "55fda102-0ab8-430c-b4b9-2ca87772c44d") : configmap "swift-ring-files" not found Oct 01 06:33:18 crc kubenswrapper[4747]: I1001 06:33:18.012209 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-index-lg8t8" event={"ID":"5c4d75c0-6aea-4ef6-aba7-8bc84df2ba8f","Type":"ContainerStarted","Data":"a8ab14947c03826060fc2f46d3f8046b929a426ef41620046a2fe99f78745627"} Oct 01 06:33:18 crc kubenswrapper[4747]: I1001 06:33:18.036554 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-index-lg8t8" podStartSLOduration=2.248067522 podStartE2EDuration="4.036532287s" podCreationTimestamp="2025-10-01 06:33:14 +0000 UTC" firstStartedPulling="2025-10-01 06:33:15.411541371 +0000 UTC m=+996.821198430" lastFinishedPulling="2025-10-01 06:33:17.200006136 +0000 UTC m=+998.609663195" observedRunningTime="2025-10-01 06:33:18.030318026 +0000 UTC m=+999.439975095" watchObservedRunningTime="2025-10-01 06:33:18.036532287 +0000 UTC m=+999.446189346" Oct 01 06:33:21 crc kubenswrapper[4747]: I1001 06:33:21.038425 4747 generic.go:334] "Generic (PLEG): container finished" podID="928ccacf-68c1-4861-9804-320bcdc66f93" containerID="ae24c79f8f36aacd0514266ab198b06b35a6945d13ee14c085e034779fa468b2" exitCode=0 Oct 01 06:33:21 crc kubenswrapper[4747]: I1001 06:33:21.038526 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" event={"ID":"928ccacf-68c1-4861-9804-320bcdc66f93","Type":"ContainerDied","Data":"ae24c79f8f36aacd0514266ab198b06b35a6945d13ee14c085e034779fa468b2"} Oct 01 06:33:21 crc kubenswrapper[4747]: I1001 06:33:21.318446 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/swift-proxy-7578798499-5kbdv"] Oct 01 06:33:21 crc kubenswrapper[4747]: I1001 06:33:21.320176 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" Oct 01 06:33:21 crc kubenswrapper[4747]: I1001 06:33:21.347904 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/swift-proxy-7578798499-5kbdv"] Oct 01 06:33:21 crc kubenswrapper[4747]: I1001 06:33:21.404414 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c-run-httpd\") pod \"swift-proxy-7578798499-5kbdv\" (UID: \"a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c\") " pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" Oct 01 06:33:21 crc kubenswrapper[4747]: I1001 06:33:21.404570 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64spk\" (UniqueName: \"kubernetes.io/projected/a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c-kube-api-access-64spk\") pod \"swift-proxy-7578798499-5kbdv\" (UID: \"a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c\") " pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" Oct 01 06:33:21 crc kubenswrapper[4747]: I1001 06:33:21.404618 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c-log-httpd\") pod \"swift-proxy-7578798499-5kbdv\" (UID: \"a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c\") " pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" Oct 01 06:33:21 crc kubenswrapper[4747]: I1001 06:33:21.404650 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c-etc-swift\") pod \"swift-proxy-7578798499-5kbdv\" (UID: \"a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c\") " pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" Oct 01 06:33:21 crc kubenswrapper[4747]: I1001 06:33:21.404675 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c-config-data\") pod \"swift-proxy-7578798499-5kbdv\" (UID: \"a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c\") " pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" Oct 01 06:33:21 crc kubenswrapper[4747]: I1001 06:33:21.506405 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64spk\" (UniqueName: \"kubernetes.io/projected/a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c-kube-api-access-64spk\") pod \"swift-proxy-7578798499-5kbdv\" (UID: \"a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c\") " pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" Oct 01 06:33:21 crc kubenswrapper[4747]: I1001 06:33:21.506488 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c-log-httpd\") pod \"swift-proxy-7578798499-5kbdv\" (UID: \"a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c\") " pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" Oct 01 06:33:21 crc kubenswrapper[4747]: I1001 06:33:21.506540 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c-etc-swift\") pod \"swift-proxy-7578798499-5kbdv\" (UID: \"a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c\") " pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" Oct 01 06:33:21 crc kubenswrapper[4747]: I1001 
06:33:21.506585 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c-config-data\") pod \"swift-proxy-7578798499-5kbdv\" (UID: \"a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c\") " pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" Oct 01 06:33:21 crc kubenswrapper[4747]: I1001 06:33:21.506741 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c-run-httpd\") pod \"swift-proxy-7578798499-5kbdv\" (UID: \"a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c\") " pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" Oct 01 06:33:21 crc kubenswrapper[4747]: I1001 06:33:21.507580 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c-run-httpd\") pod \"swift-proxy-7578798499-5kbdv\" (UID: \"a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c\") " pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" Oct 01 06:33:21 crc kubenswrapper[4747]: I1001 06:33:21.507620 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c-log-httpd\") pod \"swift-proxy-7578798499-5kbdv\" (UID: \"a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c\") " pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" Oct 01 06:33:21 crc kubenswrapper[4747]: I1001 06:33:21.513292 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c-config-data\") pod \"swift-proxy-7578798499-5kbdv\" (UID: \"a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c\") " pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" Oct 01 06:33:21 crc kubenswrapper[4747]: I1001 06:33:21.513787 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c-etc-swift\") pod \"swift-proxy-7578798499-5kbdv\" (UID: \"a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c\") " pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" Oct 01 06:33:21 crc kubenswrapper[4747]: I1001 06:33:21.527127 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64spk\" (UniqueName: \"kubernetes.io/projected/a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c-kube-api-access-64spk\") pod \"swift-proxy-7578798499-5kbdv\" (UID: \"a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c\") " pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" Oct 01 06:33:21 crc kubenswrapper[4747]: I1001 06:33:21.642368 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" Oct 01 06:33:22 crc kubenswrapper[4747]: I1001 06:33:22.131203 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/swift-proxy-7578798499-5kbdv"] Oct 01 06:33:22 crc kubenswrapper[4747]: I1001 06:33:22.366874 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:22 crc kubenswrapper[4747]: I1001 06:33:22.429784 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/928ccacf-68c1-4861-9804-320bcdc66f93-ring-data-devices\") pod \"928ccacf-68c1-4861-9804-320bcdc66f93\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " Oct 01 06:33:22 crc kubenswrapper[4747]: I1001 06:33:22.429848 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/928ccacf-68c1-4861-9804-320bcdc66f93-swiftconf\") pod \"928ccacf-68c1-4861-9804-320bcdc66f93\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " Oct 01 06:33:22 crc kubenswrapper[4747]: I1001 06:33:22.429885 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r2rgc\" (UniqueName: \"kubernetes.io/projected/928ccacf-68c1-4861-9804-320bcdc66f93-kube-api-access-r2rgc\") pod \"928ccacf-68c1-4861-9804-320bcdc66f93\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " Oct 01 06:33:22 crc kubenswrapper[4747]: I1001 06:33:22.429943 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/928ccacf-68c1-4861-9804-320bcdc66f93-scripts\") pod \"928ccacf-68c1-4861-9804-320bcdc66f93\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " Oct 01 06:33:22 crc kubenswrapper[4747]: I1001 06:33:22.429983 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/928ccacf-68c1-4861-9804-320bcdc66f93-etc-swift\") pod \"928ccacf-68c1-4861-9804-320bcdc66f93\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " Oct 01 06:33:22 crc kubenswrapper[4747]: I1001 06:33:22.430036 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/928ccacf-68c1-4861-9804-320bcdc66f93-dispersionconf\") pod \"928ccacf-68c1-4861-9804-320bcdc66f93\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " Oct 01 06:33:22 crc kubenswrapper[4747]: I1001 06:33:22.430962 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/928ccacf-68c1-4861-9804-320bcdc66f93-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "928ccacf-68c1-4861-9804-320bcdc66f93" (UID: "928ccacf-68c1-4861-9804-320bcdc66f93"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:33:22 crc kubenswrapper[4747]: I1001 06:33:22.431914 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/928ccacf-68c1-4861-9804-320bcdc66f93-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "928ccacf-68c1-4861-9804-320bcdc66f93" (UID: "928ccacf-68c1-4861-9804-320bcdc66f93"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:33:22 crc kubenswrapper[4747]: I1001 06:33:22.434897 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/928ccacf-68c1-4861-9804-320bcdc66f93-kube-api-access-r2rgc" (OuterVolumeSpecName: "kube-api-access-r2rgc") pod "928ccacf-68c1-4861-9804-320bcdc66f93" (UID: "928ccacf-68c1-4861-9804-320bcdc66f93"). InnerVolumeSpecName "kube-api-access-r2rgc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:33:22 crc kubenswrapper[4747]: I1001 06:33:22.443145 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/928ccacf-68c1-4861-9804-320bcdc66f93-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "928ccacf-68c1-4861-9804-320bcdc66f93" (UID: "928ccacf-68c1-4861-9804-320bcdc66f93"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:33:22 crc kubenswrapper[4747]: E1001 06:33:22.446019 4747 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/928ccacf-68c1-4861-9804-320bcdc66f93-swiftconf podName:928ccacf-68c1-4861-9804-320bcdc66f93 nodeName:}" failed. No retries permitted until 2025-10-01 06:33:22.945994158 +0000 UTC m=+1004.355651207 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "swiftconf" (UniqueName: "kubernetes.io/secret/928ccacf-68c1-4861-9804-320bcdc66f93-swiftconf") pod "928ccacf-68c1-4861-9804-320bcdc66f93" (UID: "928ccacf-68c1-4861-9804-320bcdc66f93") : error deleting /var/lib/kubelet/pods/928ccacf-68c1-4861-9804-320bcdc66f93/volume-subpaths: remove /var/lib/kubelet/pods/928ccacf-68c1-4861-9804-320bcdc66f93/volume-subpaths: no such file or directory Oct 01 06:33:22 crc kubenswrapper[4747]: I1001 06:33:22.446358 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/928ccacf-68c1-4861-9804-320bcdc66f93-scripts" (OuterVolumeSpecName: "scripts") pod "928ccacf-68c1-4861-9804-320bcdc66f93" (UID: "928ccacf-68c1-4861-9804-320bcdc66f93"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:33:22 crc kubenswrapper[4747]: I1001 06:33:22.531533 4747 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/928ccacf-68c1-4861-9804-320bcdc66f93-dispersionconf\") on node \"crc\" DevicePath \"\"" Oct 01 06:33:22 crc kubenswrapper[4747]: I1001 06:33:22.531582 4747 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/928ccacf-68c1-4861-9804-320bcdc66f93-ring-data-devices\") on node \"crc\" DevicePath \"\"" Oct 01 06:33:22 crc kubenswrapper[4747]: I1001 06:33:22.531602 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r2rgc\" (UniqueName: \"kubernetes.io/projected/928ccacf-68c1-4861-9804-320bcdc66f93-kube-api-access-r2rgc\") on node \"crc\" DevicePath \"\"" Oct 01 06:33:22 crc kubenswrapper[4747]: I1001 06:33:22.531623 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/928ccacf-68c1-4861-9804-320bcdc66f93-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 06:33:22 crc kubenswrapper[4747]: I1001 06:33:22.531642 4747 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/928ccacf-68c1-4861-9804-320bcdc66f93-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 01 06:33:23 crc kubenswrapper[4747]: I1001 06:33:23.039407 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/928ccacf-68c1-4861-9804-320bcdc66f93-swiftconf\") pod \"928ccacf-68c1-4861-9804-320bcdc66f93\" (UID: \"928ccacf-68c1-4861-9804-320bcdc66f93\") " Oct 01 06:33:23 crc kubenswrapper[4747]: I1001 06:33:23.056348 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/928ccacf-68c1-4861-9804-320bcdc66f93-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "928ccacf-68c1-4861-9804-320bcdc66f93" (UID: "928ccacf-68c1-4861-9804-320bcdc66f93"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:33:23 crc kubenswrapper[4747]: I1001 06:33:23.086882 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" event={"ID":"928ccacf-68c1-4861-9804-320bcdc66f93","Type":"ContainerDied","Data":"984cce4154290659bf31ce39ce5b7f815833f61ee2fd52a280bb72eb0ff01f23"} Oct 01 06:33:23 crc kubenswrapper[4747]: I1001 06:33:23.086939 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="984cce4154290659bf31ce39ce5b7f815833f61ee2fd52a280bb72eb0ff01f23" Oct 01 06:33:23 crc kubenswrapper[4747]: I1001 06:33:23.087009 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/swift-ring-rebalance-qtqmv" Oct 01 06:33:23 crc kubenswrapper[4747]: I1001 06:33:23.105087 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" event={"ID":"a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c","Type":"ContainerStarted","Data":"ca61f28ebc662ad3548635efa75e9837f8c338fead52ea1429ac4f0d9fa8fa6c"} Oct 01 06:33:23 crc kubenswrapper[4747]: I1001 06:33:23.105167 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" event={"ID":"a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c","Type":"ContainerStarted","Data":"991d4967c5dfe62e82257e9054593602e242f37f290c8101e6f3fc784de978bf"} Oct 01 06:33:23 crc kubenswrapper[4747]: I1001 06:33:23.105187 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" event={"ID":"a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c","Type":"ContainerStarted","Data":"3b1d18b0c92bc532670e8803f15bb9b84d4ca364431ebf2863fd623698d7da1a"} Oct 01 06:33:23 crc kubenswrapper[4747]: I1001 06:33:23.106062 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" Oct 01 06:33:23 crc kubenswrapper[4747]: I1001 06:33:23.106174 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" Oct 01 06:33:23 crc kubenswrapper[4747]: I1001 06:33:23.136921 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" podStartSLOduration=2.136895205 podStartE2EDuration="2.136895205s" podCreationTimestamp="2025-10-01 06:33:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:33:23.13311233 +0000 UTC m=+1004.542769389" watchObservedRunningTime="2025-10-01 06:33:23.136895205 +0000 UTC m=+1004.546552284" Oct 01 06:33:23 crc kubenswrapper[4747]: I1001 06:33:23.141603 4747 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/928ccacf-68c1-4861-9804-320bcdc66f93-swiftconf\") on node \"crc\" DevicePath \"\"" Oct 01 06:33:23 crc kubenswrapper[4747]: E1001 06:33:23.782648 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4150451_d4ba_43d4_a834_f97510776094.slice/crio-31266020d88d1b9d6d27241b121b7431ee07492fee029761ce97e852038a50a5\": 
RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4150451_d4ba_43d4_a834_f97510776094.slice\": RecentStats: unable to find data in memory cache]" Oct 01 06:33:25 crc kubenswrapper[4747]: I1001 06:33:25.096941 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-index-lg8t8" Oct 01 06:33:25 crc kubenswrapper[4747]: I1001 06:33:25.097294 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/glance-operator-index-lg8t8" Oct 01 06:33:25 crc kubenswrapper[4747]: I1001 06:33:25.137259 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/glance-operator-index-lg8t8" Oct 01 06:33:25 crc kubenswrapper[4747]: I1001 06:33:25.174917 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-index-lg8t8" Oct 01 06:33:25 crc kubenswrapper[4747]: I1001 06:33:25.269862 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/55fda102-0ab8-430c-b4b9-2ca87772c44d-etc-swift\") pod \"swift-storage-0\" (UID: \"55fda102-0ab8-430c-b4b9-2ca87772c44d\") " pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:25 crc kubenswrapper[4747]: I1001 06:33:25.278134 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/55fda102-0ab8-430c-b4b9-2ca87772c44d-etc-swift\") pod \"swift-storage-0\" (UID: \"55fda102-0ab8-430c-b4b9-2ca87772c44d\") " pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:25 crc kubenswrapper[4747]: I1001 06:33:25.293552 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/swift-storage-0" Oct 01 06:33:25 crc kubenswrapper[4747]: I1001 06:33:25.566367 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/swift-storage-0"] Oct 01 06:33:26 crc kubenswrapper[4747]: I1001 06:33:26.135792 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"55fda102-0ab8-430c-b4b9-2ca87772c44d","Type":"ContainerStarted","Data":"a6a1367b307d82293535cdea10a90b45fa5304adc7d8138efddfd1521b548f38"} Oct 01 06:33:28 crc kubenswrapper[4747]: I1001 06:33:28.153850 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"55fda102-0ab8-430c-b4b9-2ca87772c44d","Type":"ContainerStarted","Data":"291346151db38c169f657365de7f0de5f3266657bc89057cf7ca44d589cd5dcc"} Oct 01 06:33:28 crc kubenswrapper[4747]: I1001 06:33:28.155829 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"55fda102-0ab8-430c-b4b9-2ca87772c44d","Type":"ContainerStarted","Data":"b4af2eecf23a636a05bb4d201bd9ab158b11aef0f15f83d4c44f71839ca7daac"} Oct 01 06:33:28 crc kubenswrapper[4747]: I1001 06:33:28.155865 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"55fda102-0ab8-430c-b4b9-2ca87772c44d","Type":"ContainerStarted","Data":"20ea109be737eb06c410f875b380bd5efaba689831ae0ec239256d04c35c2089"} Oct 01 06:33:28 crc kubenswrapper[4747]: I1001 06:33:28.155877 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"55fda102-0ab8-430c-b4b9-2ca87772c44d","Type":"ContainerStarted","Data":"abc152f906bfa6c80d7f26839479e8625bb73934e325796708f8a4ecfb80f1b5"} Oct 01 06:33:29 crc kubenswrapper[4747]: I1001 06:33:29.367204 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r"] Oct 01 06:33:29 crc kubenswrapper[4747]: E1001 06:33:29.367794 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="928ccacf-68c1-4861-9804-320bcdc66f93" containerName="swift-ring-rebalance" Oct 01 06:33:29 crc kubenswrapper[4747]: I1001 06:33:29.367808 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="928ccacf-68c1-4861-9804-320bcdc66f93" containerName="swift-ring-rebalance" Oct 01 06:33:29 crc kubenswrapper[4747]: I1001 06:33:29.367966 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="928ccacf-68c1-4861-9804-320bcdc66f93" containerName="swift-ring-rebalance" Oct 01 06:33:29 crc kubenswrapper[4747]: I1001 06:33:29.368857 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r" Oct 01 06:33:29 crc kubenswrapper[4747]: I1001 06:33:29.381241 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-b9vtl" Oct 01 06:33:29 crc kubenswrapper[4747]: I1001 06:33:29.382725 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r"] Oct 01 06:33:29 crc kubenswrapper[4747]: I1001 06:33:29.443511 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/291e00e8-ae3e-4eaa-8dd7-056c954d4800-util\") pod \"bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r\" (UID: \"291e00e8-ae3e-4eaa-8dd7-056c954d4800\") " pod="openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r" Oct 01 06:33:29 crc kubenswrapper[4747]: I1001 06:33:29.443578 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqdz4\" (UniqueName: \"kubernetes.io/projected/291e00e8-ae3e-4eaa-8dd7-056c954d4800-kube-api-access-fqdz4\") pod \"bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r\" (UID: \"291e00e8-ae3e-4eaa-8dd7-056c954d4800\") " pod="openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r" Oct 01 06:33:29 crc kubenswrapper[4747]: I1001 06:33:29.443648 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/291e00e8-ae3e-4eaa-8dd7-056c954d4800-bundle\") pod \"bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r\" (UID: \"291e00e8-ae3e-4eaa-8dd7-056c954d4800\") " pod="openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r" Oct 01 06:33:29 crc kubenswrapper[4747]: I1001 06:33:29.545101 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/291e00e8-ae3e-4eaa-8dd7-056c954d4800-util\") pod \"bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r\" (UID: \"291e00e8-ae3e-4eaa-8dd7-056c954d4800\") " pod="openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r" Oct 01 06:33:29 crc kubenswrapper[4747]: I1001 06:33:29.545163 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqdz4\" (UniqueName: \"kubernetes.io/projected/291e00e8-ae3e-4eaa-8dd7-056c954d4800-kube-api-access-fqdz4\") pod \"bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r\" (UID: \"291e00e8-ae3e-4eaa-8dd7-056c954d4800\") " pod="openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r" Oct 01 06:33:29 crc kubenswrapper[4747]: I1001 06:33:29.545208 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/291e00e8-ae3e-4eaa-8dd7-056c954d4800-bundle\") pod \"bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r\" (UID: \"291e00e8-ae3e-4eaa-8dd7-056c954d4800\") " pod="openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r" Oct 01 06:33:29 crc kubenswrapper[4747]: I1001 06:33:29.545907 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/291e00e8-ae3e-4eaa-8dd7-056c954d4800-util\") pod \"bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r\" (UID: \"291e00e8-ae3e-4eaa-8dd7-056c954d4800\") " pod="openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r" Oct 01 06:33:29 crc kubenswrapper[4747]: I1001 06:33:29.545985 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/291e00e8-ae3e-4eaa-8dd7-056c954d4800-bundle\") pod \"bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r\" (UID: \"291e00e8-ae3e-4eaa-8dd7-056c954d4800\") " pod="openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r" Oct 01 06:33:29 crc kubenswrapper[4747]: I1001 06:33:29.569660 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqdz4\" (UniqueName: \"kubernetes.io/projected/291e00e8-ae3e-4eaa-8dd7-056c954d4800-kube-api-access-fqdz4\") pod \"bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r\" (UID: \"291e00e8-ae3e-4eaa-8dd7-056c954d4800\") " pod="openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r" Oct 01 06:33:29 crc kubenswrapper[4747]: I1001 06:33:29.698526 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r" Oct 01 06:33:29 crc kubenswrapper[4747]: I1001 06:33:29.956301 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r"] Oct 01 06:33:29 crc kubenswrapper[4747]: W1001 06:33:29.959272 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod291e00e8_ae3e_4eaa_8dd7_056c954d4800.slice/crio-a0abd4a2ee3324fff73c9a401ab51b57f1cc782b394970fb44a1f0c84bb8cc5b WatchSource:0}: Error finding container a0abd4a2ee3324fff73c9a401ab51b57f1cc782b394970fb44a1f0c84bb8cc5b: Status 404 returned error can't find the container with id a0abd4a2ee3324fff73c9a401ab51b57f1cc782b394970fb44a1f0c84bb8cc5b Oct 01 06:33:30 crc kubenswrapper[4747]: I1001 06:33:30.167814 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r" event={"ID":"291e00e8-ae3e-4eaa-8dd7-056c954d4800","Type":"ContainerStarted","Data":"a0abd4a2ee3324fff73c9a401ab51b57f1cc782b394970fb44a1f0c84bb8cc5b"} Oct 01 06:33:30 crc kubenswrapper[4747]: I1001 06:33:30.171031 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"55fda102-0ab8-430c-b4b9-2ca87772c44d","Type":"ContainerStarted","Data":"0c1fc3b8906d6898ba42203e6ce734c9f9f448f772f7b6f2965f09de4e1ce605"} Oct 01 06:33:30 crc kubenswrapper[4747]: I1001 06:33:30.171056 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"55fda102-0ab8-430c-b4b9-2ca87772c44d","Type":"ContainerStarted","Data":"e08667042f68dfc4d1a7a9e9145f5979ab9b32bc901e8747ab897d99676092a6"} Oct 01 06:33:30 crc kubenswrapper[4747]: I1001 06:33:30.171065 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"55fda102-0ab8-430c-b4b9-2ca87772c44d","Type":"ContainerStarted","Data":"56dde17468023c0790c0480a66e2b0d18ec799c7d5deebed6283a059d70d2329"} Oct 01 06:33:30 crc kubenswrapper[4747]: I1001 06:33:30.171073 4747 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"55fda102-0ab8-430c-b4b9-2ca87772c44d","Type":"ContainerStarted","Data":"52e2f2b1834f9812d56ef3831b40854b914139f307bfc5d13602a3020f5211d6"} Oct 01 06:33:31 crc kubenswrapper[4747]: I1001 06:33:31.187519 4747 generic.go:334] "Generic (PLEG): container finished" podID="291e00e8-ae3e-4eaa-8dd7-056c954d4800" containerID="7914bbd5cca773e1debc99a4c60df21ba48361c3796fa5657bb8793d84fe85d8" exitCode=0 Oct 01 06:33:31 crc kubenswrapper[4747]: I1001 06:33:31.187601 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r" event={"ID":"291e00e8-ae3e-4eaa-8dd7-056c954d4800","Type":"ContainerDied","Data":"7914bbd5cca773e1debc99a4c60df21ba48361c3796fa5657bb8793d84fe85d8"} Oct 01 06:33:31 crc kubenswrapper[4747]: I1001 06:33:31.647395 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" Oct 01 06:33:31 crc kubenswrapper[4747]: I1001 06:33:31.656178 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/swift-proxy-7578798499-5kbdv" Oct 01 06:33:32 crc kubenswrapper[4747]: I1001 06:33:32.203150 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"55fda102-0ab8-430c-b4b9-2ca87772c44d","Type":"ContainerStarted","Data":"4cf14ab104822265e0d633d0e50f1edb094d3c0ed9d960178c03ac5b4ed8c5af"} Oct 01 06:33:32 crc kubenswrapper[4747]: I1001 06:33:32.206526 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r" event={"ID":"291e00e8-ae3e-4eaa-8dd7-056c954d4800","Type":"ContainerStarted","Data":"1f90c101d376494dbb5089a55d120807b7b5c544a42f60b5e54b143bcb8fd2b2"} Oct 01 06:33:33 crc kubenswrapper[4747]: I1001 06:33:33.258327 4747 generic.go:334] "Generic (PLEG): container finished" podID="291e00e8-ae3e-4eaa-8dd7-056c954d4800" containerID="1f90c101d376494dbb5089a55d120807b7b5c544a42f60b5e54b143bcb8fd2b2" exitCode=0 Oct 01 06:33:33 crc kubenswrapper[4747]: I1001 06:33:33.258385 4747 generic.go:334] "Generic (PLEG): container finished" podID="291e00e8-ae3e-4eaa-8dd7-056c954d4800" containerID="c8780f8281cfd8dc49e87b9c188124cd942d19e76da6e373cb7c694d7ffaca9b" exitCode=0 Oct 01 06:33:33 crc kubenswrapper[4747]: I1001 06:33:33.258455 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r" event={"ID":"291e00e8-ae3e-4eaa-8dd7-056c954d4800","Type":"ContainerDied","Data":"1f90c101d376494dbb5089a55d120807b7b5c544a42f60b5e54b143bcb8fd2b2"} Oct 01 06:33:33 crc kubenswrapper[4747]: I1001 06:33:33.258495 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r" event={"ID":"291e00e8-ae3e-4eaa-8dd7-056c954d4800","Type":"ContainerDied","Data":"c8780f8281cfd8dc49e87b9c188124cd942d19e76da6e373cb7c694d7ffaca9b"} Oct 01 06:33:33 crc kubenswrapper[4747]: I1001 06:33:33.299875 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"55fda102-0ab8-430c-b4b9-2ca87772c44d","Type":"ContainerStarted","Data":"1fdaa8577a0a7d39aa9ce84a4d0965feadbc3a2501e72e8bdaba5401b1adcabf"} Oct 01 06:33:33 crc kubenswrapper[4747]: I1001 06:33:33.299940 4747 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"55fda102-0ab8-430c-b4b9-2ca87772c44d","Type":"ContainerStarted","Data":"0737f88e6350de33f0b930778dbb9b896c02f41804766f360b6f9c4482db52c1"} Oct 01 06:33:33 crc kubenswrapper[4747]: I1001 06:33:33.299958 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"55fda102-0ab8-430c-b4b9-2ca87772c44d","Type":"ContainerStarted","Data":"0b5749b7e5d9644ea2f4141523593049c68182de67b5ca34498b111243269f4f"} Oct 01 06:33:33 crc kubenswrapper[4747]: I1001 06:33:33.299975 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"55fda102-0ab8-430c-b4b9-2ca87772c44d","Type":"ContainerStarted","Data":"2c4a32d54b20881bcdfa81e9b3cd1f7098d1f8486c9209f77f6951b47aa44158"} Oct 01 06:33:33 crc kubenswrapper[4747]: I1001 06:33:33.299993 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"55fda102-0ab8-430c-b4b9-2ca87772c44d","Type":"ContainerStarted","Data":"0de1e72bd67cae1d57325fa76db3afd39816624b67364fe5f5eba769d6f93fd4"} Oct 01 06:33:33 crc kubenswrapper[4747]: I1001 06:33:33.300010 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/swift-storage-0" event={"ID":"55fda102-0ab8-430c-b4b9-2ca87772c44d","Type":"ContainerStarted","Data":"0e15be4ad7cd7fa146277c4ae319ec61ecde774c44645a697da4e17e70522853"} Oct 01 06:33:33 crc kubenswrapper[4747]: I1001 06:33:33.332294 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/swift-storage-0" podStartSLOduration=18.980833419 podStartE2EDuration="25.332273116s" podCreationTimestamp="2025-10-01 06:33:08 +0000 UTC" firstStartedPulling="2025-10-01 06:33:25.57571024 +0000 UTC m=+1006.985367289" lastFinishedPulling="2025-10-01 06:33:31.927149937 +0000 UTC m=+1013.336806986" observedRunningTime="2025-10-01 06:33:33.330139444 +0000 UTC m=+1014.739796503" watchObservedRunningTime="2025-10-01 06:33:33.332273116 +0000 UTC m=+1014.741930205" Oct 01 06:33:34 crc kubenswrapper[4747]: E1001 06:33:34.002395 4747 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4150451_d4ba_43d4_a834_f97510776094.slice/crio-31266020d88d1b9d6d27241b121b7431ee07492fee029761ce97e852038a50a5\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4150451_d4ba_43d4_a834_f97510776094.slice\": RecentStats: unable to find data in memory cache]" Oct 01 06:33:34 crc kubenswrapper[4747]: I1001 06:33:34.609882 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r" Oct 01 06:33:34 crc kubenswrapper[4747]: I1001 06:33:34.722708 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/291e00e8-ae3e-4eaa-8dd7-056c954d4800-bundle\") pod \"291e00e8-ae3e-4eaa-8dd7-056c954d4800\" (UID: \"291e00e8-ae3e-4eaa-8dd7-056c954d4800\") " Oct 01 06:33:34 crc kubenswrapper[4747]: I1001 06:33:34.722800 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/291e00e8-ae3e-4eaa-8dd7-056c954d4800-util\") pod \"291e00e8-ae3e-4eaa-8dd7-056c954d4800\" (UID: \"291e00e8-ae3e-4eaa-8dd7-056c954d4800\") " Oct 01 06:33:34 crc kubenswrapper[4747]: I1001 06:33:34.722891 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqdz4\" (UniqueName: \"kubernetes.io/projected/291e00e8-ae3e-4eaa-8dd7-056c954d4800-kube-api-access-fqdz4\") pod \"291e00e8-ae3e-4eaa-8dd7-056c954d4800\" (UID: \"291e00e8-ae3e-4eaa-8dd7-056c954d4800\") " Oct 01 06:33:34 crc kubenswrapper[4747]: I1001 06:33:34.723271 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/291e00e8-ae3e-4eaa-8dd7-056c954d4800-bundle" (OuterVolumeSpecName: "bundle") pod "291e00e8-ae3e-4eaa-8dd7-056c954d4800" (UID: "291e00e8-ae3e-4eaa-8dd7-056c954d4800"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:33:34 crc kubenswrapper[4747]: I1001 06:33:34.731798 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/291e00e8-ae3e-4eaa-8dd7-056c954d4800-kube-api-access-fqdz4" (OuterVolumeSpecName: "kube-api-access-fqdz4") pod "291e00e8-ae3e-4eaa-8dd7-056c954d4800" (UID: "291e00e8-ae3e-4eaa-8dd7-056c954d4800"). InnerVolumeSpecName "kube-api-access-fqdz4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:33:34 crc kubenswrapper[4747]: I1001 06:33:34.737725 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/291e00e8-ae3e-4eaa-8dd7-056c954d4800-util" (OuterVolumeSpecName: "util") pod "291e00e8-ae3e-4eaa-8dd7-056c954d4800" (UID: "291e00e8-ae3e-4eaa-8dd7-056c954d4800"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:33:34 crc kubenswrapper[4747]: I1001 06:33:34.824583 4747 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/291e00e8-ae3e-4eaa-8dd7-056c954d4800-util\") on node \"crc\" DevicePath \"\"" Oct 01 06:33:34 crc kubenswrapper[4747]: I1001 06:33:34.824662 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqdz4\" (UniqueName: \"kubernetes.io/projected/291e00e8-ae3e-4eaa-8dd7-056c954d4800-kube-api-access-fqdz4\") on node \"crc\" DevicePath \"\"" Oct 01 06:33:34 crc kubenswrapper[4747]: I1001 06:33:34.824687 4747 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/291e00e8-ae3e-4eaa-8dd7-056c954d4800-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:33:35 crc kubenswrapper[4747]: I1001 06:33:35.302828 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r" event={"ID":"291e00e8-ae3e-4eaa-8dd7-056c954d4800","Type":"ContainerDied","Data":"a0abd4a2ee3324fff73c9a401ab51b57f1cc782b394970fb44a1f0c84bb8cc5b"} Oct 01 06:33:35 crc kubenswrapper[4747]: I1001 06:33:35.302917 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0abd4a2ee3324fff73c9a401ab51b57f1cc782b394970fb44a1f0c84bb8cc5b" Oct 01 06:33:35 crc kubenswrapper[4747]: I1001 06:33:35.302978 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r" Oct 01 06:33:35 crc kubenswrapper[4747]: I1001 06:33:35.762271 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:33:35 crc kubenswrapper[4747]: I1001 06:33:35.762369 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:33:55 crc kubenswrapper[4747]: I1001 06:33:55.564140 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-595f9d7bb-btldp"] Oct 01 06:33:55 crc kubenswrapper[4747]: E1001 06:33:55.565124 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="291e00e8-ae3e-4eaa-8dd7-056c954d4800" containerName="util" Oct 01 06:33:55 crc kubenswrapper[4747]: I1001 06:33:55.565144 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="291e00e8-ae3e-4eaa-8dd7-056c954d4800" containerName="util" Oct 01 06:33:55 crc kubenswrapper[4747]: E1001 06:33:55.565190 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="291e00e8-ae3e-4eaa-8dd7-056c954d4800" containerName="extract" Oct 01 06:33:55 crc kubenswrapper[4747]: I1001 06:33:55.565203 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="291e00e8-ae3e-4eaa-8dd7-056c954d4800" containerName="extract" Oct 01 06:33:55 crc kubenswrapper[4747]: E1001 06:33:55.565222 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="291e00e8-ae3e-4eaa-8dd7-056c954d4800" containerName="pull" Oct 01 06:33:55 
crc kubenswrapper[4747]: I1001 06:33:55.565236 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="291e00e8-ae3e-4eaa-8dd7-056c954d4800" containerName="pull" Oct 01 06:33:55 crc kubenswrapper[4747]: I1001 06:33:55.565476 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="291e00e8-ae3e-4eaa-8dd7-056c954d4800" containerName="extract" Oct 01 06:33:55 crc kubenswrapper[4747]: I1001 06:33:55.566706 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-595f9d7bb-btldp" Oct 01 06:33:55 crc kubenswrapper[4747]: I1001 06:33:55.569771 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-service-cert" Oct 01 06:33:55 crc kubenswrapper[4747]: I1001 06:33:55.569791 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-bk4rj" Oct 01 06:33:55 crc kubenswrapper[4747]: I1001 06:33:55.585089 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-595f9d7bb-btldp"] Oct 01 06:33:55 crc kubenswrapper[4747]: I1001 06:33:55.734864 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/32800c0d-bcf4-4f5d-b8db-598f4450ce31-apiservice-cert\") pod \"glance-operator-controller-manager-595f9d7bb-btldp\" (UID: \"32800c0d-bcf4-4f5d-b8db-598f4450ce31\") " pod="openstack-operators/glance-operator-controller-manager-595f9d7bb-btldp" Oct 01 06:33:55 crc kubenswrapper[4747]: I1001 06:33:55.735023 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x54cq\" (UniqueName: \"kubernetes.io/projected/32800c0d-bcf4-4f5d-b8db-598f4450ce31-kube-api-access-x54cq\") pod \"glance-operator-controller-manager-595f9d7bb-btldp\" (UID: \"32800c0d-bcf4-4f5d-b8db-598f4450ce31\") " pod="openstack-operators/glance-operator-controller-manager-595f9d7bb-btldp" Oct 01 06:33:55 crc kubenswrapper[4747]: I1001 06:33:55.735129 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/32800c0d-bcf4-4f5d-b8db-598f4450ce31-webhook-cert\") pod \"glance-operator-controller-manager-595f9d7bb-btldp\" (UID: \"32800c0d-bcf4-4f5d-b8db-598f4450ce31\") " pod="openstack-operators/glance-operator-controller-manager-595f9d7bb-btldp" Oct 01 06:33:55 crc kubenswrapper[4747]: I1001 06:33:55.836516 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/32800c0d-bcf4-4f5d-b8db-598f4450ce31-apiservice-cert\") pod \"glance-operator-controller-manager-595f9d7bb-btldp\" (UID: \"32800c0d-bcf4-4f5d-b8db-598f4450ce31\") " pod="openstack-operators/glance-operator-controller-manager-595f9d7bb-btldp" Oct 01 06:33:55 crc kubenswrapper[4747]: I1001 06:33:55.836628 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x54cq\" (UniqueName: \"kubernetes.io/projected/32800c0d-bcf4-4f5d-b8db-598f4450ce31-kube-api-access-x54cq\") pod \"glance-operator-controller-manager-595f9d7bb-btldp\" (UID: \"32800c0d-bcf4-4f5d-b8db-598f4450ce31\") " pod="openstack-operators/glance-operator-controller-manager-595f9d7bb-btldp" Oct 01 06:33:55 crc kubenswrapper[4747]: I1001 06:33:55.836685 4747 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/32800c0d-bcf4-4f5d-b8db-598f4450ce31-webhook-cert\") pod \"glance-operator-controller-manager-595f9d7bb-btldp\" (UID: \"32800c0d-bcf4-4f5d-b8db-598f4450ce31\") " pod="openstack-operators/glance-operator-controller-manager-595f9d7bb-btldp" Oct 01 06:33:55 crc kubenswrapper[4747]: I1001 06:33:55.845253 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/32800c0d-bcf4-4f5d-b8db-598f4450ce31-webhook-cert\") pod \"glance-operator-controller-manager-595f9d7bb-btldp\" (UID: \"32800c0d-bcf4-4f5d-b8db-598f4450ce31\") " pod="openstack-operators/glance-operator-controller-manager-595f9d7bb-btldp" Oct 01 06:33:55 crc kubenswrapper[4747]: I1001 06:33:55.845282 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/32800c0d-bcf4-4f5d-b8db-598f4450ce31-apiservice-cert\") pod \"glance-operator-controller-manager-595f9d7bb-btldp\" (UID: \"32800c0d-bcf4-4f5d-b8db-598f4450ce31\") " pod="openstack-operators/glance-operator-controller-manager-595f9d7bb-btldp" Oct 01 06:33:55 crc kubenswrapper[4747]: I1001 06:33:55.865314 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x54cq\" (UniqueName: \"kubernetes.io/projected/32800c0d-bcf4-4f5d-b8db-598f4450ce31-kube-api-access-x54cq\") pod \"glance-operator-controller-manager-595f9d7bb-btldp\" (UID: \"32800c0d-bcf4-4f5d-b8db-598f4450ce31\") " pod="openstack-operators/glance-operator-controller-manager-595f9d7bb-btldp" Oct 01 06:33:55 crc kubenswrapper[4747]: I1001 06:33:55.885141 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-595f9d7bb-btldp" Oct 01 06:33:56 crc kubenswrapper[4747]: I1001 06:33:56.368256 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-595f9d7bb-btldp"] Oct 01 06:33:56 crc kubenswrapper[4747]: W1001 06:33:56.372164 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod32800c0d_bcf4_4f5d_b8db_598f4450ce31.slice/crio-d1d556a14bb420db7231f07c611543e9b297a17df2159e2b639f57bdfa199484 WatchSource:0}: Error finding container d1d556a14bb420db7231f07c611543e9b297a17df2159e2b639f57bdfa199484: Status 404 returned error can't find the container with id d1d556a14bb420db7231f07c611543e9b297a17df2159e2b639f57bdfa199484 Oct 01 06:33:56 crc kubenswrapper[4747]: I1001 06:33:56.494356 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-595f9d7bb-btldp" event={"ID":"32800c0d-bcf4-4f5d-b8db-598f4450ce31","Type":"ContainerStarted","Data":"d1d556a14bb420db7231f07c611543e9b297a17df2159e2b639f57bdfa199484"} Oct 01 06:33:58 crc kubenswrapper[4747]: I1001 06:33:58.514153 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-595f9d7bb-btldp" event={"ID":"32800c0d-bcf4-4f5d-b8db-598f4450ce31","Type":"ContainerStarted","Data":"cd5a4bd1fa0a2c7b7669a26023bf133367a3270e159b2f51bebd7efbc022fb31"} Oct 01 06:33:59 crc kubenswrapper[4747]: I1001 06:33:59.525349 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-595f9d7bb-btldp" event={"ID":"32800c0d-bcf4-4f5d-b8db-598f4450ce31","Type":"ContainerStarted","Data":"d975f56bbd703342e03384d4f3abb4016d0d575c20665fd7839e88ed844f51ce"} Oct 01 06:33:59 crc kubenswrapper[4747]: I1001 06:33:59.525732 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-595f9d7bb-btldp" Oct 01 06:33:59 crc kubenswrapper[4747]: I1001 06:33:59.558742 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-595f9d7bb-btldp" podStartSLOduration=2.218529141 podStartE2EDuration="4.558713574s" podCreationTimestamp="2025-10-01 06:33:55 +0000 UTC" firstStartedPulling="2025-10-01 06:33:56.373950011 +0000 UTC m=+1037.783607080" lastFinishedPulling="2025-10-01 06:33:58.714134454 +0000 UTC m=+1040.123791513" observedRunningTime="2025-10-01 06:33:59.548040573 +0000 UTC m=+1040.957697692" watchObservedRunningTime="2025-10-01 06:33:59.558713574 +0000 UTC m=+1040.968370663" Oct 01 06:34:05 crc kubenswrapper[4747]: I1001 06:34:05.761392 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:34:05 crc kubenswrapper[4747]: I1001 06:34:05.762208 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:34:05 crc kubenswrapper[4747]: I1001 06:34:05.762289 
4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:34:05 crc kubenswrapper[4747]: I1001 06:34:05.763235 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c7fef4e888b7d3576c874bccbc790853fe54a02c39edee04d55581bad2028dc9"} pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 06:34:05 crc kubenswrapper[4747]: I1001 06:34:05.763318 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" containerID="cri-o://c7fef4e888b7d3576c874bccbc790853fe54a02c39edee04d55581bad2028dc9" gracePeriod=600 Oct 01 06:34:05 crc kubenswrapper[4747]: I1001 06:34:05.896281 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-595f9d7bb-btldp" Oct 01 06:34:06 crc kubenswrapper[4747]: I1001 06:34:06.591646 4747 generic.go:334] "Generic (PLEG): container finished" podID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerID="c7fef4e888b7d3576c874bccbc790853fe54a02c39edee04d55581bad2028dc9" exitCode=0 Oct 01 06:34:06 crc kubenswrapper[4747]: I1001 06:34:06.591724 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" event={"ID":"90df9e29-7482-4ab7-84c6-f3029df17a0d","Type":"ContainerDied","Data":"c7fef4e888b7d3576c874bccbc790853fe54a02c39edee04d55581bad2028dc9"} Oct 01 06:34:06 crc kubenswrapper[4747]: I1001 06:34:06.592073 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" event={"ID":"90df9e29-7482-4ab7-84c6-f3029df17a0d","Type":"ContainerStarted","Data":"f2895e753844a4a5cdf39762ff3e165f0014207b47172cc667faca0e96eb7319"} Oct 01 06:34:06 crc kubenswrapper[4747]: I1001 06:34:06.592107 4747 scope.go:117] "RemoveContainer" containerID="779a7206e770a3d3eff5fabc4a08045c36917bf566f468ea4269d33bee1db67f" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.094693 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/openstackclient"] Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.095781 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/openstackclient" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.098823 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"openstack-config" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.098899 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"openstack-scripts-9db6gc427h" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.099408 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"default-dockercfg-mvtsj" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.099942 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"openstack-config-secret" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.123661 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstackclient"] Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.148085 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-db-create-7spvf"] Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.149157 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-create-7spvf" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.156563 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7bed4500-d8df-453f-93b4-f2d093d81138-openstack-config-secret\") pod \"openstackclient\" (UID: \"7bed4500-d8df-453f-93b4-f2d093d81138\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.156607 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkqsf\" (UniqueName: \"kubernetes.io/projected/a3e96b76-cf58-4b94-9e89-5006581983b5-kube-api-access-dkqsf\") pod \"glance-db-create-7spvf\" (UID: \"a3e96b76-cf58-4b94-9e89-5006581983b5\") " pod="glance-kuttl-tests/glance-db-create-7spvf" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.156635 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7bed4500-d8df-453f-93b4-f2d093d81138-openstack-config\") pod \"openstackclient\" (UID: \"7bed4500-d8df-453f-93b4-f2d093d81138\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.156672 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/7bed4500-d8df-453f-93b4-f2d093d81138-openstack-scripts\") pod \"openstackclient\" (UID: \"7bed4500-d8df-453f-93b4-f2d093d81138\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.156810 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cr569\" (UniqueName: \"kubernetes.io/projected/7bed4500-d8df-453f-93b4-f2d093d81138-kube-api-access-cr569\") pod \"openstackclient\" (UID: \"7bed4500-d8df-453f-93b4-f2d093d81138\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.164198 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-create-7spvf"] Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.257920 4747 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cr569\" (UniqueName: \"kubernetes.io/projected/7bed4500-d8df-453f-93b4-f2d093d81138-kube-api-access-cr569\") pod \"openstackclient\" (UID: \"7bed4500-d8df-453f-93b4-f2d093d81138\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.258029 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7bed4500-d8df-453f-93b4-f2d093d81138-openstack-config-secret\") pod \"openstackclient\" (UID: \"7bed4500-d8df-453f-93b4-f2d093d81138\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.258062 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkqsf\" (UniqueName: \"kubernetes.io/projected/a3e96b76-cf58-4b94-9e89-5006581983b5-kube-api-access-dkqsf\") pod \"glance-db-create-7spvf\" (UID: \"a3e96b76-cf58-4b94-9e89-5006581983b5\") " pod="glance-kuttl-tests/glance-db-create-7spvf" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.258087 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7bed4500-d8df-453f-93b4-f2d093d81138-openstack-config\") pod \"openstackclient\" (UID: \"7bed4500-d8df-453f-93b4-f2d093d81138\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.258969 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7bed4500-d8df-453f-93b4-f2d093d81138-openstack-config\") pod \"openstackclient\" (UID: \"7bed4500-d8df-453f-93b4-f2d093d81138\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.259706 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/7bed4500-d8df-453f-93b4-f2d093d81138-openstack-scripts\") pod \"openstackclient\" (UID: \"7bed4500-d8df-453f-93b4-f2d093d81138\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.260338 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/7bed4500-d8df-453f-93b4-f2d093d81138-openstack-scripts\") pod \"openstackclient\" (UID: \"7bed4500-d8df-453f-93b4-f2d093d81138\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.264258 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7bed4500-d8df-453f-93b4-f2d093d81138-openstack-config-secret\") pod \"openstackclient\" (UID: \"7bed4500-d8df-453f-93b4-f2d093d81138\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.273768 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkqsf\" (UniqueName: \"kubernetes.io/projected/a3e96b76-cf58-4b94-9e89-5006581983b5-kube-api-access-dkqsf\") pod \"glance-db-create-7spvf\" (UID: \"a3e96b76-cf58-4b94-9e89-5006581983b5\") " pod="glance-kuttl-tests/glance-db-create-7spvf" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.280897 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cr569\" (UniqueName: 
\"kubernetes.io/projected/7bed4500-d8df-453f-93b4-f2d093d81138-kube-api-access-cr569\") pod \"openstackclient\" (UID: \"7bed4500-d8df-453f-93b4-f2d093d81138\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.411433 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstackclient" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.462623 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-create-7spvf" Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.914552 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstackclient"] Oct 01 06:34:09 crc kubenswrapper[4747]: W1001 06:34:09.941958 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7bed4500_d8df_453f_93b4_f2d093d81138.slice/crio-a1fa009e391a114033e2f392b9578be6d3f5f0df13c433b0a5935d0d34e26916 WatchSource:0}: Error finding container a1fa009e391a114033e2f392b9578be6d3f5f0df13c433b0a5935d0d34e26916: Status 404 returned error can't find the container with id a1fa009e391a114033e2f392b9578be6d3f5f0df13c433b0a5935d0d34e26916 Oct 01 06:34:09 crc kubenswrapper[4747]: I1001 06:34:09.978810 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-create-7spvf"] Oct 01 06:34:09 crc kubenswrapper[4747]: W1001 06:34:09.990484 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3e96b76_cf58_4b94_9e89_5006581983b5.slice/crio-264ad0ccb909ed477c3f0a62cac7c75533a29abf860de33b4ecf5cc1b6846163 WatchSource:0}: Error finding container 264ad0ccb909ed477c3f0a62cac7c75533a29abf860de33b4ecf5cc1b6846163: Status 404 returned error can't find the container with id 264ad0ccb909ed477c3f0a62cac7c75533a29abf860de33b4ecf5cc1b6846163 Oct 01 06:34:10 crc kubenswrapper[4747]: I1001 06:34:10.635743 4747 generic.go:334] "Generic (PLEG): container finished" podID="a3e96b76-cf58-4b94-9e89-5006581983b5" containerID="c1056f3e321ccb923e89f694601b4c7ade2c541ced152d34b6947e4499b2802d" exitCode=0 Oct 01 06:34:10 crc kubenswrapper[4747]: I1001 06:34:10.635849 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-7spvf" event={"ID":"a3e96b76-cf58-4b94-9e89-5006581983b5","Type":"ContainerDied","Data":"c1056f3e321ccb923e89f694601b4c7ade2c541ced152d34b6947e4499b2802d"} Oct 01 06:34:10 crc kubenswrapper[4747]: I1001 06:34:10.636187 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-7spvf" event={"ID":"a3e96b76-cf58-4b94-9e89-5006581983b5","Type":"ContainerStarted","Data":"264ad0ccb909ed477c3f0a62cac7c75533a29abf860de33b4ecf5cc1b6846163"} Oct 01 06:34:10 crc kubenswrapper[4747]: I1001 06:34:10.638050 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstackclient" event={"ID":"7bed4500-d8df-453f-93b4-f2d093d81138","Type":"ContainerStarted","Data":"a1fa009e391a114033e2f392b9578be6d3f5f0df13c433b0a5935d0d34e26916"} Oct 01 06:34:11 crc kubenswrapper[4747]: I1001 06:34:11.983513 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-create-7spvf" Oct 01 06:34:12 crc kubenswrapper[4747]: I1001 06:34:12.105876 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dkqsf\" (UniqueName: \"kubernetes.io/projected/a3e96b76-cf58-4b94-9e89-5006581983b5-kube-api-access-dkqsf\") pod \"a3e96b76-cf58-4b94-9e89-5006581983b5\" (UID: \"a3e96b76-cf58-4b94-9e89-5006581983b5\") " Oct 01 06:34:12 crc kubenswrapper[4747]: I1001 06:34:12.130904 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3e96b76-cf58-4b94-9e89-5006581983b5-kube-api-access-dkqsf" (OuterVolumeSpecName: "kube-api-access-dkqsf") pod "a3e96b76-cf58-4b94-9e89-5006581983b5" (UID: "a3e96b76-cf58-4b94-9e89-5006581983b5"). InnerVolumeSpecName "kube-api-access-dkqsf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:34:12 crc kubenswrapper[4747]: I1001 06:34:12.207298 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dkqsf\" (UniqueName: \"kubernetes.io/projected/a3e96b76-cf58-4b94-9e89-5006581983b5-kube-api-access-dkqsf\") on node \"crc\" DevicePath \"\"" Oct 01 06:34:12 crc kubenswrapper[4747]: I1001 06:34:12.668415 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-7spvf" event={"ID":"a3e96b76-cf58-4b94-9e89-5006581983b5","Type":"ContainerDied","Data":"264ad0ccb909ed477c3f0a62cac7c75533a29abf860de33b4ecf5cc1b6846163"} Oct 01 06:34:12 crc kubenswrapper[4747]: I1001 06:34:12.668454 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="264ad0ccb909ed477c3f0a62cac7c75533a29abf860de33b4ecf5cc1b6846163" Oct 01 06:34:12 crc kubenswrapper[4747]: I1001 06:34:12.668473 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-create-7spvf" Oct 01 06:34:19 crc kubenswrapper[4747]: I1001 06:34:19.144827 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-2c77-account-create-9t2bh"] Oct 01 06:34:19 crc kubenswrapper[4747]: E1001 06:34:19.146097 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3e96b76-cf58-4b94-9e89-5006581983b5" containerName="mariadb-database-create" Oct 01 06:34:19 crc kubenswrapper[4747]: I1001 06:34:19.146125 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3e96b76-cf58-4b94-9e89-5006581983b5" containerName="mariadb-database-create" Oct 01 06:34:19 crc kubenswrapper[4747]: I1001 06:34:19.146481 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3e96b76-cf58-4b94-9e89-5006581983b5" containerName="mariadb-database-create" Oct 01 06:34:19 crc kubenswrapper[4747]: I1001 06:34:19.147542 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-2c77-account-create-9t2bh" Oct 01 06:34:19 crc kubenswrapper[4747]: I1001 06:34:19.149830 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-db-secret" Oct 01 06:34:19 crc kubenswrapper[4747]: I1001 06:34:19.159135 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-2c77-account-create-9t2bh"] Oct 01 06:34:19 crc kubenswrapper[4747]: I1001 06:34:19.215650 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5bq6\" (UniqueName: \"kubernetes.io/projected/edddd2e6-c3e8-47c9-bdba-43548ef70d0c-kube-api-access-d5bq6\") pod \"glance-2c77-account-create-9t2bh\" (UID: \"edddd2e6-c3e8-47c9-bdba-43548ef70d0c\") " pod="glance-kuttl-tests/glance-2c77-account-create-9t2bh" Oct 01 06:34:19 crc kubenswrapper[4747]: I1001 06:34:19.317363 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5bq6\" (UniqueName: \"kubernetes.io/projected/edddd2e6-c3e8-47c9-bdba-43548ef70d0c-kube-api-access-d5bq6\") pod \"glance-2c77-account-create-9t2bh\" (UID: \"edddd2e6-c3e8-47c9-bdba-43548ef70d0c\") " pod="glance-kuttl-tests/glance-2c77-account-create-9t2bh" Oct 01 06:34:19 crc kubenswrapper[4747]: I1001 06:34:19.365275 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5bq6\" (UniqueName: \"kubernetes.io/projected/edddd2e6-c3e8-47c9-bdba-43548ef70d0c-kube-api-access-d5bq6\") pod \"glance-2c77-account-create-9t2bh\" (UID: \"edddd2e6-c3e8-47c9-bdba-43548ef70d0c\") " pod="glance-kuttl-tests/glance-2c77-account-create-9t2bh" Oct 01 06:34:19 crc kubenswrapper[4747]: I1001 06:34:19.462954 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-2c77-account-create-9t2bh" Oct 01 06:34:19 crc kubenswrapper[4747]: I1001 06:34:19.735126 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstackclient" event={"ID":"7bed4500-d8df-453f-93b4-f2d093d81138","Type":"ContainerStarted","Data":"5ead492a2a2b059bd7da20b7ee096a5c9b6f117b8bfff6475b22c1167fe075cd"} Oct 01 06:34:19 crc kubenswrapper[4747]: I1001 06:34:19.760887 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/openstackclient" podStartSLOduration=2.181162151 podStartE2EDuration="10.760870294s" podCreationTimestamp="2025-10-01 06:34:09 +0000 UTC" firstStartedPulling="2025-10-01 06:34:09.944996506 +0000 UTC m=+1051.354653555" lastFinishedPulling="2025-10-01 06:34:18.524704609 +0000 UTC m=+1059.934361698" observedRunningTime="2025-10-01 06:34:19.751081755 +0000 UTC m=+1061.160738814" watchObservedRunningTime="2025-10-01 06:34:19.760870294 +0000 UTC m=+1061.170527353" Oct 01 06:34:19 crc kubenswrapper[4747]: W1001 06:34:19.765394 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podedddd2e6_c3e8_47c9_bdba_43548ef70d0c.slice/crio-0a05fe328eff375ca7c5bce28f229b2679b652d347895ef329600120b249e6c4 WatchSource:0}: Error finding container 0a05fe328eff375ca7c5bce28f229b2679b652d347895ef329600120b249e6c4: Status 404 returned error can't find the container with id 0a05fe328eff375ca7c5bce28f229b2679b652d347895ef329600120b249e6c4 Oct 01 06:34:19 crc kubenswrapper[4747]: I1001 06:34:19.767034 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-2c77-account-create-9t2bh"] Oct 01 06:34:20 crc kubenswrapper[4747]: I1001 06:34:20.746274 4747 generic.go:334] "Generic (PLEG): container finished" podID="edddd2e6-c3e8-47c9-bdba-43548ef70d0c" containerID="9bacb400e449c2887aa66bf6901b6e923d972b681eed78d474e9d646edb96ade" exitCode=0 Oct 01 06:34:20 crc kubenswrapper[4747]: I1001 06:34:20.746396 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-2c77-account-create-9t2bh" event={"ID":"edddd2e6-c3e8-47c9-bdba-43548ef70d0c","Type":"ContainerDied","Data":"9bacb400e449c2887aa66bf6901b6e923d972b681eed78d474e9d646edb96ade"} Oct 01 06:34:20 crc kubenswrapper[4747]: I1001 06:34:20.746679 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-2c77-account-create-9t2bh" event={"ID":"edddd2e6-c3e8-47c9-bdba-43548ef70d0c","Type":"ContainerStarted","Data":"0a05fe328eff375ca7c5bce28f229b2679b652d347895ef329600120b249e6c4"} Oct 01 06:34:22 crc kubenswrapper[4747]: I1001 06:34:22.155337 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-2c77-account-create-9t2bh" Oct 01 06:34:22 crc kubenswrapper[4747]: I1001 06:34:22.263837 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5bq6\" (UniqueName: \"kubernetes.io/projected/edddd2e6-c3e8-47c9-bdba-43548ef70d0c-kube-api-access-d5bq6\") pod \"edddd2e6-c3e8-47c9-bdba-43548ef70d0c\" (UID: \"edddd2e6-c3e8-47c9-bdba-43548ef70d0c\") " Oct 01 06:34:22 crc kubenswrapper[4747]: I1001 06:34:22.273057 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edddd2e6-c3e8-47c9-bdba-43548ef70d0c-kube-api-access-d5bq6" (OuterVolumeSpecName: "kube-api-access-d5bq6") pod "edddd2e6-c3e8-47c9-bdba-43548ef70d0c" (UID: "edddd2e6-c3e8-47c9-bdba-43548ef70d0c"). InnerVolumeSpecName "kube-api-access-d5bq6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:34:22 crc kubenswrapper[4747]: I1001 06:34:22.365887 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5bq6\" (UniqueName: \"kubernetes.io/projected/edddd2e6-c3e8-47c9-bdba-43548ef70d0c-kube-api-access-d5bq6\") on node \"crc\" DevicePath \"\"" Oct 01 06:34:22 crc kubenswrapper[4747]: I1001 06:34:22.767281 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-2c77-account-create-9t2bh" event={"ID":"edddd2e6-c3e8-47c9-bdba-43548ef70d0c","Type":"ContainerDied","Data":"0a05fe328eff375ca7c5bce28f229b2679b652d347895ef329600120b249e6c4"} Oct 01 06:34:22 crc kubenswrapper[4747]: I1001 06:34:22.767353 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a05fe328eff375ca7c5bce28f229b2679b652d347895ef329600120b249e6c4" Oct 01 06:34:22 crc kubenswrapper[4747]: I1001 06:34:22.767385 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-2c77-account-create-9t2bh" Oct 01 06:34:24 crc kubenswrapper[4747]: I1001 06:34:24.209328 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-db-sync-9mm6j"] Oct 01 06:34:24 crc kubenswrapper[4747]: E1001 06:34:24.209908 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edddd2e6-c3e8-47c9-bdba-43548ef70d0c" containerName="mariadb-account-create" Oct 01 06:34:24 crc kubenswrapper[4747]: I1001 06:34:24.209923 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="edddd2e6-c3e8-47c9-bdba-43548ef70d0c" containerName="mariadb-account-create" Oct 01 06:34:24 crc kubenswrapper[4747]: I1001 06:34:24.210122 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="edddd2e6-c3e8-47c9-bdba-43548ef70d0c" containerName="mariadb-account-create" Oct 01 06:34:24 crc kubenswrapper[4747]: I1001 06:34:24.210656 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-9mm6j" Oct 01 06:34:24 crc kubenswrapper[4747]: I1001 06:34:24.213332 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-glance-dockercfg-8p2wf" Oct 01 06:34:24 crc kubenswrapper[4747]: I1001 06:34:24.213714 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-config-data" Oct 01 06:34:24 crc kubenswrapper[4747]: I1001 06:34:24.232229 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-sync-9mm6j"] Oct 01 06:34:24 crc kubenswrapper[4747]: I1001 06:34:24.295725 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/befef3e0-16ac-4eaa-ad04-f207aee8c495-config-data\") pod \"glance-db-sync-9mm6j\" (UID: \"befef3e0-16ac-4eaa-ad04-f207aee8c495\") " pod="glance-kuttl-tests/glance-db-sync-9mm6j" Oct 01 06:34:24 crc kubenswrapper[4747]: I1001 06:34:24.295936 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5f9p\" (UniqueName: \"kubernetes.io/projected/befef3e0-16ac-4eaa-ad04-f207aee8c495-kube-api-access-x5f9p\") pod \"glance-db-sync-9mm6j\" (UID: \"befef3e0-16ac-4eaa-ad04-f207aee8c495\") " pod="glance-kuttl-tests/glance-db-sync-9mm6j" Oct 01 06:34:24 crc kubenswrapper[4747]: I1001 06:34:24.296010 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/befef3e0-16ac-4eaa-ad04-f207aee8c495-db-sync-config-data\") pod \"glance-db-sync-9mm6j\" (UID: \"befef3e0-16ac-4eaa-ad04-f207aee8c495\") " pod="glance-kuttl-tests/glance-db-sync-9mm6j" Oct 01 06:34:24 crc kubenswrapper[4747]: I1001 06:34:24.397777 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5f9p\" (UniqueName: \"kubernetes.io/projected/befef3e0-16ac-4eaa-ad04-f207aee8c495-kube-api-access-x5f9p\") pod \"glance-db-sync-9mm6j\" (UID: \"befef3e0-16ac-4eaa-ad04-f207aee8c495\") " pod="glance-kuttl-tests/glance-db-sync-9mm6j" Oct 01 06:34:24 crc kubenswrapper[4747]: I1001 06:34:24.397841 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/befef3e0-16ac-4eaa-ad04-f207aee8c495-db-sync-config-data\") pod \"glance-db-sync-9mm6j\" (UID: \"befef3e0-16ac-4eaa-ad04-f207aee8c495\") " pod="glance-kuttl-tests/glance-db-sync-9mm6j" Oct 01 06:34:24 crc kubenswrapper[4747]: I1001 06:34:24.398001 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/befef3e0-16ac-4eaa-ad04-f207aee8c495-config-data\") pod \"glance-db-sync-9mm6j\" (UID: \"befef3e0-16ac-4eaa-ad04-f207aee8c495\") " pod="glance-kuttl-tests/glance-db-sync-9mm6j" Oct 01 06:34:24 crc kubenswrapper[4747]: I1001 06:34:24.413066 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/befef3e0-16ac-4eaa-ad04-f207aee8c495-db-sync-config-data\") pod \"glance-db-sync-9mm6j\" (UID: \"befef3e0-16ac-4eaa-ad04-f207aee8c495\") " pod="glance-kuttl-tests/glance-db-sync-9mm6j" Oct 01 06:34:24 crc kubenswrapper[4747]: I1001 06:34:24.413145 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/befef3e0-16ac-4eaa-ad04-f207aee8c495-config-data\") pod \"glance-db-sync-9mm6j\" (UID: \"befef3e0-16ac-4eaa-ad04-f207aee8c495\") " pod="glance-kuttl-tests/glance-db-sync-9mm6j" Oct 01 06:34:24 crc kubenswrapper[4747]: I1001 06:34:24.415531 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5f9p\" (UniqueName: \"kubernetes.io/projected/befef3e0-16ac-4eaa-ad04-f207aee8c495-kube-api-access-x5f9p\") pod \"glance-db-sync-9mm6j\" (UID: \"befef3e0-16ac-4eaa-ad04-f207aee8c495\") " pod="glance-kuttl-tests/glance-db-sync-9mm6j" Oct 01 06:34:24 crc kubenswrapper[4747]: I1001 06:34:24.540159 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-9mm6j" Oct 01 06:34:24 crc kubenswrapper[4747]: I1001 06:34:24.998919 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-sync-9mm6j"] Oct 01 06:34:25 crc kubenswrapper[4747]: W1001 06:34:25.004882 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbefef3e0_16ac_4eaa_ad04_f207aee8c495.slice/crio-e9781e30b8362c200dc1c82a9dbaff87ef51cd13ca46cd5973ba92c1bd0677ec WatchSource:0}: Error finding container e9781e30b8362c200dc1c82a9dbaff87ef51cd13ca46cd5973ba92c1bd0677ec: Status 404 returned error can't find the container with id e9781e30b8362c200dc1c82a9dbaff87ef51cd13ca46cd5973ba92c1bd0677ec Oct 01 06:34:25 crc kubenswrapper[4747]: I1001 06:34:25.789533 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-9mm6j" event={"ID":"befef3e0-16ac-4eaa-ad04-f207aee8c495","Type":"ContainerStarted","Data":"e9781e30b8362c200dc1c82a9dbaff87ef51cd13ca46cd5973ba92c1bd0677ec"} Oct 01 06:34:38 crc kubenswrapper[4747]: I1001 06:34:38.899693 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-9mm6j" event={"ID":"befef3e0-16ac-4eaa-ad04-f207aee8c495","Type":"ContainerStarted","Data":"2abe0eb82b02c8683238645da1672e29d59cb001d74341bab44f69470f7ddaf3"} Oct 01 06:34:38 crc kubenswrapper[4747]: I1001 06:34:38.913925 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-db-sync-9mm6j" podStartSLOduration=2.489665006 podStartE2EDuration="14.913893097s" podCreationTimestamp="2025-10-01 06:34:24 +0000 UTC" firstStartedPulling="2025-10-01 06:34:25.007259196 +0000 UTC m=+1066.416916245" lastFinishedPulling="2025-10-01 06:34:37.431487297 +0000 UTC m=+1078.841144336" observedRunningTime="2025-10-01 06:34:38.912223287 +0000 UTC m=+1080.321880346" watchObservedRunningTime="2025-10-01 06:34:38.913893097 +0000 UTC m=+1080.323550146" Oct 01 06:34:52 crc kubenswrapper[4747]: I1001 06:34:52.024487 4747 generic.go:334] "Generic (PLEG): container finished" podID="befef3e0-16ac-4eaa-ad04-f207aee8c495" containerID="2abe0eb82b02c8683238645da1672e29d59cb001d74341bab44f69470f7ddaf3" exitCode=0 Oct 01 06:34:52 crc kubenswrapper[4747]: I1001 06:34:52.024636 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-9mm6j" event={"ID":"befef3e0-16ac-4eaa-ad04-f207aee8c495","Type":"ContainerDied","Data":"2abe0eb82b02c8683238645da1672e29d59cb001d74341bab44f69470f7ddaf3"} Oct 01 06:34:53 crc kubenswrapper[4747]: I1001 06:34:53.355978 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-9mm6j" Oct 01 06:34:53 crc kubenswrapper[4747]: I1001 06:34:53.498943 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5f9p\" (UniqueName: \"kubernetes.io/projected/befef3e0-16ac-4eaa-ad04-f207aee8c495-kube-api-access-x5f9p\") pod \"befef3e0-16ac-4eaa-ad04-f207aee8c495\" (UID: \"befef3e0-16ac-4eaa-ad04-f207aee8c495\") " Oct 01 06:34:53 crc kubenswrapper[4747]: I1001 06:34:53.499086 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/befef3e0-16ac-4eaa-ad04-f207aee8c495-config-data\") pod \"befef3e0-16ac-4eaa-ad04-f207aee8c495\" (UID: \"befef3e0-16ac-4eaa-ad04-f207aee8c495\") " Oct 01 06:34:53 crc kubenswrapper[4747]: I1001 06:34:53.499174 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/befef3e0-16ac-4eaa-ad04-f207aee8c495-db-sync-config-data\") pod \"befef3e0-16ac-4eaa-ad04-f207aee8c495\" (UID: \"befef3e0-16ac-4eaa-ad04-f207aee8c495\") " Oct 01 06:34:53 crc kubenswrapper[4747]: I1001 06:34:53.506908 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/befef3e0-16ac-4eaa-ad04-f207aee8c495-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "befef3e0-16ac-4eaa-ad04-f207aee8c495" (UID: "befef3e0-16ac-4eaa-ad04-f207aee8c495"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:34:53 crc kubenswrapper[4747]: I1001 06:34:53.507174 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/befef3e0-16ac-4eaa-ad04-f207aee8c495-kube-api-access-x5f9p" (OuterVolumeSpecName: "kube-api-access-x5f9p") pod "befef3e0-16ac-4eaa-ad04-f207aee8c495" (UID: "befef3e0-16ac-4eaa-ad04-f207aee8c495"). InnerVolumeSpecName "kube-api-access-x5f9p". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:34:53 crc kubenswrapper[4747]: I1001 06:34:53.567811 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/befef3e0-16ac-4eaa-ad04-f207aee8c495-config-data" (OuterVolumeSpecName: "config-data") pod "befef3e0-16ac-4eaa-ad04-f207aee8c495" (UID: "befef3e0-16ac-4eaa-ad04-f207aee8c495"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:34:53 crc kubenswrapper[4747]: I1001 06:34:53.600792 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x5f9p\" (UniqueName: \"kubernetes.io/projected/befef3e0-16ac-4eaa-ad04-f207aee8c495-kube-api-access-x5f9p\") on node \"crc\" DevicePath \"\"" Oct 01 06:34:53 crc kubenswrapper[4747]: I1001 06:34:53.600833 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/befef3e0-16ac-4eaa-ad04-f207aee8c495-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 06:34:53 crc kubenswrapper[4747]: I1001 06:34:53.600846 4747 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/befef3e0-16ac-4eaa-ad04-f207aee8c495-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 06:34:54 crc kubenswrapper[4747]: I1001 06:34:54.049284 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-9mm6j" event={"ID":"befef3e0-16ac-4eaa-ad04-f207aee8c495","Type":"ContainerDied","Data":"e9781e30b8362c200dc1c82a9dbaff87ef51cd13ca46cd5973ba92c1bd0677ec"} Oct 01 06:34:54 crc kubenswrapper[4747]: I1001 06:34:54.049341 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e9781e30b8362c200dc1c82a9dbaff87ef51cd13ca46cd5973ba92c1bd0677ec" Oct 01 06:34:54 crc kubenswrapper[4747]: I1001 06:34:54.049433 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-9mm6j" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.407448 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:34:55 crc kubenswrapper[4747]: E1001 06:34:55.408250 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="befef3e0-16ac-4eaa-ad04-f207aee8c495" containerName="glance-db-sync" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.408271 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="befef3e0-16ac-4eaa-ad04-f207aee8c495" containerName="glance-db-sync" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.408517 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="befef3e0-16ac-4eaa-ad04-f207aee8c495" containerName="glance-db-sync" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.444059 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.444182 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.448413 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-glance-dockercfg-8p2wf" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.448641 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-scripts" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.448774 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-default-single-config-data" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.450721 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.452759 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.465029 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.532379 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-scripts\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.532415 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-scripts\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.532432 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.532459 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.532477 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-sys\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.532705 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-etc-iscsi\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.532802 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-logs\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.532833 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-dev\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.532850 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: 
\"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-var-locks-brick\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.532868 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-run\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.532891 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-sys\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.532909 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-var-locks-brick\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.532936 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-lib-modules\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.533083 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-logs\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.533154 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.533187 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-run\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.533220 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-config-data\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.533241 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: 
\"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-etc-nvme\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.533356 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-dev\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.533405 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtf8t\" (UniqueName: \"kubernetes.io/projected/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-kube-api-access-jtf8t\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.533462 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-httpd-run\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.533482 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-etc-iscsi\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.533500 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rf9mt\" (UniqueName: \"kubernetes.io/projected/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-kube-api-access-rf9mt\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.533559 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-httpd-run\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.533622 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-lib-modules\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.533663 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-config-data\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.533701 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.533762 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-etc-nvme\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.635983 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-logs\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636074 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636124 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-run\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636163 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-config-data\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636197 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-etc-nvme\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636245 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-dev\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636284 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtf8t\" (UniqueName: \"kubernetes.io/projected/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-kube-api-access-jtf8t\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636324 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-httpd-run\") 
pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636328 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-dev\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636359 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-etc-iscsi\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636373 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-etc-nvme\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636396 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rf9mt\" (UniqueName: \"kubernetes.io/projected/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-kube-api-access-rf9mt\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636234 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-run\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636479 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-httpd-run\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636546 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-lib-modules\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636543 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-etc-iscsi\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636607 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-config-data\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 
06:34:55.636656 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636678 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-lib-modules\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636689 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-etc-nvme\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636743 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-scripts\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636782 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-etc-nvme\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636794 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-scripts\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636838 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636917 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636963 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-sys\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636979 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-httpd-run\") pod \"glance-default-single-1\" (UID: 
\"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.636770 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-httpd-run\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.637038 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-etc-iscsi\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.637100 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-logs\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.637163 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-dev\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.637193 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-var-locks-brick\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.637235 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-run\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.637244 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") device mount path \"/mnt/openstack/pv02\"" pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.637510 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-logs\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.637574 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") device mount path \"/mnt/openstack/pv01\"" pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 
06:34:55.637605 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-sys\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.637736 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-var-locks-brick\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.637807 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") device mount path \"/mnt/openstack/pv11\"" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.637822 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-dev\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.637195 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") device mount path \"/mnt/openstack/pv10\"" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.637860 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-etc-iscsi\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.637889 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-run\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.637987 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-sys\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.638013 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-var-locks-brick\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.638035 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: 
\"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-lib-modules\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.638141 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-lib-modules\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.638196 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-var-locks-brick\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.638208 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-sys\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.642008 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-logs\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.642566 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-scripts\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.644270 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-scripts\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.644990 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-config-data\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.660732 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-config-data\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.670496 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtf8t\" (UniqueName: \"kubernetes.io/projected/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-kube-api-access-jtf8t\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 
06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.685791 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rf9mt\" (UniqueName: \"kubernetes.io/projected/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-kube-api-access-rf9mt\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.687304 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.694681 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.697854 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-single-1\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.704386 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-0\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.765928 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:34:55 crc kubenswrapper[4747]: I1001 06:34:55.774596 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:56 crc kubenswrapper[4747]: I1001 06:34:56.009835 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 01 06:34:56 crc kubenswrapper[4747]: I1001 06:34:56.048476 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 01 06:34:56 crc kubenswrapper[4747]: I1001 06:34:56.076141 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:34:56 crc kubenswrapper[4747]: W1001 06:34:56.091573 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0f89004_45a0_4fc9_9e0f_1455ed9e9299.slice/crio-86474dc23a1b07f4488ce8c50dad8ae9a5d91f909f0280fcf94d420beacf1a9f WatchSource:0}: Error finding container 86474dc23a1b07f4488ce8c50dad8ae9a5d91f909f0280fcf94d420beacf1a9f: Status 404 returned error can't find the container with id 86474dc23a1b07f4488ce8c50dad8ae9a5d91f909f0280fcf94d420beacf1a9f Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.075072 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"6bf83f1e-1f35-4536-bfdb-c03ad69232fd","Type":"ContainerStarted","Data":"118356981f8b72fa7894fd3cbb54c65a90f743c401120741d228820dfc83a03c"} Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.075725 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"6bf83f1e-1f35-4536-bfdb-c03ad69232fd","Type":"ContainerStarted","Data":"dc0fd9b5395f36d0f272530c8524a42f97768ce994519439515de3492ddc04d9"} Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.075749 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"6bf83f1e-1f35-4536-bfdb-c03ad69232fd","Type":"ContainerStarted","Data":"9ddf0437fd303b5955edd791c155bb428397b31481707544ad4bf056f9f43b8e"} Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.075129 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-1" podUID="6bf83f1e-1f35-4536-bfdb-c03ad69232fd" containerName="glance-log" containerID="cri-o://dc0fd9b5395f36d0f272530c8524a42f97768ce994519439515de3492ddc04d9" gracePeriod=30 Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.075252 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-1" podUID="6bf83f1e-1f35-4536-bfdb-c03ad69232fd" containerName="glance-httpd" containerID="cri-o://118356981f8b72fa7894fd3cbb54c65a90f743c401120741d228820dfc83a03c" gracePeriod=30 Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.086493 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"d0f89004-45a0-4fc9-9e0f-1455ed9e9299","Type":"ContainerStarted","Data":"d23faf4af9bf211185fa5f51515d2e6ae7033194645582c6c4b347dd9e1642ef"} Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.086779 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"d0f89004-45a0-4fc9-9e0f-1455ed9e9299","Type":"ContainerStarted","Data":"0c20ef658361e7eed503dc9726b0257c81bd410d12e5700b6fae1ea17a60aa60"} Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.086803 4747 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"d0f89004-45a0-4fc9-9e0f-1455ed9e9299","Type":"ContainerStarted","Data":"86474dc23a1b07f4488ce8c50dad8ae9a5d91f909f0280fcf94d420beacf1a9f"} Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.113658 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-single-1" podStartSLOduration=3.113575462 podStartE2EDuration="3.113575462s" podCreationTimestamp="2025-10-01 06:34:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:34:57.105953166 +0000 UTC m=+1098.515610255" watchObservedRunningTime="2025-10-01 06:34:57.113575462 +0000 UTC m=+1098.523239611" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.141952 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-single-0" podStartSLOduration=3.141923815 podStartE2EDuration="3.141923815s" podCreationTimestamp="2025-10-01 06:34:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:34:57.140007997 +0000 UTC m=+1098.549665086" watchObservedRunningTime="2025-10-01 06:34:57.141923815 +0000 UTC m=+1098.551580904" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.455209 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.583155 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-var-locks-brick\") pod \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.583235 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-scripts\") pod \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.583260 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-lib-modules\") pod \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.583284 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.583330 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-run\") pod \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.583327 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "6bf83f1e-1f35-4536-bfdb-c03ad69232fd" (UID: 
"6bf83f1e-1f35-4536-bfdb-c03ad69232fd"). InnerVolumeSpecName "lib-modules". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.583360 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-etc-nvme\") pod \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.583421 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-logs\") pod \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.583449 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance-cache\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.583487 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-sys\") pod \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.583486 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-run" (OuterVolumeSpecName: "run") pod "6bf83f1e-1f35-4536-bfdb-c03ad69232fd" (UID: "6bf83f1e-1f35-4536-bfdb-c03ad69232fd"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.583515 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-etc-iscsi\") pod \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.583575 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "6bf83f1e-1f35-4536-bfdb-c03ad69232fd" (UID: "6bf83f1e-1f35-4536-bfdb-c03ad69232fd"). InnerVolumeSpecName "etc-iscsi". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.583576 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "6bf83f1e-1f35-4536-bfdb-c03ad69232fd" (UID: "6bf83f1e-1f35-4536-bfdb-c03ad69232fd"). InnerVolumeSpecName "etc-nvme". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.583603 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-sys" (OuterVolumeSpecName: "sys") pod "6bf83f1e-1f35-4536-bfdb-c03ad69232fd" (UID: "6bf83f1e-1f35-4536-bfdb-c03ad69232fd"). InnerVolumeSpecName "sys". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.583634 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-dev\") pod \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.583709 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-dev" (OuterVolumeSpecName: "dev") pod "6bf83f1e-1f35-4536-bfdb-c03ad69232fd" (UID: "6bf83f1e-1f35-4536-bfdb-c03ad69232fd"). InnerVolumeSpecName "dev". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.583824 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-config-data\") pod \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.583905 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtf8t\" (UniqueName: \"kubernetes.io/projected/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-kube-api-access-jtf8t\") pod \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.583984 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-httpd-run\") pod \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\" (UID: \"6bf83f1e-1f35-4536-bfdb-c03ad69232fd\") " Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.584023 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-logs" (OuterVolumeSpecName: "logs") pod "6bf83f1e-1f35-4536-bfdb-c03ad69232fd" (UID: "6bf83f1e-1f35-4536-bfdb-c03ad69232fd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.584121 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "6bf83f1e-1f35-4536-bfdb-c03ad69232fd" (UID: "6bf83f1e-1f35-4536-bfdb-c03ad69232fd"). InnerVolumeSpecName "var-locks-brick". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.584438 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "6bf83f1e-1f35-4536-bfdb-c03ad69232fd" (UID: "6bf83f1e-1f35-4536-bfdb-c03ad69232fd"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.584859 4747 reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-lib-modules\") on node \"crc\" DevicePath \"\"" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.584915 4747 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-run\") on node \"crc\" DevicePath \"\"" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.584947 4747 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-etc-nvme\") on node \"crc\" DevicePath \"\"" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.584976 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-logs\") on node \"crc\" DevicePath \"\"" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.585001 4747 reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-sys\") on node \"crc\" DevicePath \"\"" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.585027 4747 reconciler_common.go:293] "Volume detached for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-etc-iscsi\") on node \"crc\" DevicePath \"\"" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.585053 4747 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-dev\") on node \"crc\" DevicePath \"\"" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.585079 4747 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.585104 4747 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-var-locks-brick\") on node \"crc\" DevicePath \"\"" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.590137 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance-cache") pod "6bf83f1e-1f35-4536-bfdb-c03ad69232fd" (UID: "6bf83f1e-1f35-4536-bfdb-c03ad69232fd"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.590556 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-kube-api-access-jtf8t" (OuterVolumeSpecName: "kube-api-access-jtf8t") pod "6bf83f1e-1f35-4536-bfdb-c03ad69232fd" (UID: "6bf83f1e-1f35-4536-bfdb-c03ad69232fd"). InnerVolumeSpecName "kube-api-access-jtf8t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.590704 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-scripts" (OuterVolumeSpecName: "scripts") pod "6bf83f1e-1f35-4536-bfdb-c03ad69232fd" (UID: "6bf83f1e-1f35-4536-bfdb-c03ad69232fd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.590928 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "6bf83f1e-1f35-4536-bfdb-c03ad69232fd" (UID: "6bf83f1e-1f35-4536-bfdb-c03ad69232fd"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.641983 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-config-data" (OuterVolumeSpecName: "config-data") pod "6bf83f1e-1f35-4536-bfdb-c03ad69232fd" (UID: "6bf83f1e-1f35-4536-bfdb-c03ad69232fd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.687348 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.687462 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.687494 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.687514 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.687537 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtf8t\" (UniqueName: \"kubernetes.io/projected/6bf83f1e-1f35-4536-bfdb-c03ad69232fd-kube-api-access-jtf8t\") on node \"crc\" DevicePath \"\"" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.707671 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.709384 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.789295 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Oct 01 06:34:57 crc kubenswrapper[4747]: I1001 06:34:57.789329 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath 
\"\"" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.097100 4747 generic.go:334] "Generic (PLEG): container finished" podID="6bf83f1e-1f35-4536-bfdb-c03ad69232fd" containerID="118356981f8b72fa7894fd3cbb54c65a90f743c401120741d228820dfc83a03c" exitCode=143 Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.097310 4747 generic.go:334] "Generic (PLEG): container finished" podID="6bf83f1e-1f35-4536-bfdb-c03ad69232fd" containerID="dc0fd9b5395f36d0f272530c8524a42f97768ce994519439515de3492ddc04d9" exitCode=143 Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.097377 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.097340 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"6bf83f1e-1f35-4536-bfdb-c03ad69232fd","Type":"ContainerDied","Data":"118356981f8b72fa7894fd3cbb54c65a90f743c401120741d228820dfc83a03c"} Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.097440 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"6bf83f1e-1f35-4536-bfdb-c03ad69232fd","Type":"ContainerDied","Data":"dc0fd9b5395f36d0f272530c8524a42f97768ce994519439515de3492ddc04d9"} Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.097461 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"6bf83f1e-1f35-4536-bfdb-c03ad69232fd","Type":"ContainerDied","Data":"9ddf0437fd303b5955edd791c155bb428397b31481707544ad4bf056f9f43b8e"} Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.097483 4747 scope.go:117] "RemoveContainer" containerID="118356981f8b72fa7894fd3cbb54c65a90f743c401120741d228820dfc83a03c" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.115367 4747 scope.go:117] "RemoveContainer" containerID="dc0fd9b5395f36d0f272530c8524a42f97768ce994519439515de3492ddc04d9" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.133928 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.136102 4747 scope.go:117] "RemoveContainer" containerID="118356981f8b72fa7894fd3cbb54c65a90f743c401120741d228820dfc83a03c" Oct 01 06:34:58 crc kubenswrapper[4747]: E1001 06:34:58.138816 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"118356981f8b72fa7894fd3cbb54c65a90f743c401120741d228820dfc83a03c\": container with ID starting with 118356981f8b72fa7894fd3cbb54c65a90f743c401120741d228820dfc83a03c not found: ID does not exist" containerID="118356981f8b72fa7894fd3cbb54c65a90f743c401120741d228820dfc83a03c" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.138879 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"118356981f8b72fa7894fd3cbb54c65a90f743c401120741d228820dfc83a03c"} err="failed to get container status \"118356981f8b72fa7894fd3cbb54c65a90f743c401120741d228820dfc83a03c\": rpc error: code = NotFound desc = could not find container \"118356981f8b72fa7894fd3cbb54c65a90f743c401120741d228820dfc83a03c\": container with ID starting with 118356981f8b72fa7894fd3cbb54c65a90f743c401120741d228820dfc83a03c not found: ID does not exist" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.138915 4747 scope.go:117] "RemoveContainer" 
containerID="dc0fd9b5395f36d0f272530c8524a42f97768ce994519439515de3492ddc04d9" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.139111 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 01 06:34:58 crc kubenswrapper[4747]: E1001 06:34:58.144037 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc0fd9b5395f36d0f272530c8524a42f97768ce994519439515de3492ddc04d9\": container with ID starting with dc0fd9b5395f36d0f272530c8524a42f97768ce994519439515de3492ddc04d9 not found: ID does not exist" containerID="dc0fd9b5395f36d0f272530c8524a42f97768ce994519439515de3492ddc04d9" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.144093 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc0fd9b5395f36d0f272530c8524a42f97768ce994519439515de3492ddc04d9"} err="failed to get container status \"dc0fd9b5395f36d0f272530c8524a42f97768ce994519439515de3492ddc04d9\": rpc error: code = NotFound desc = could not find container \"dc0fd9b5395f36d0f272530c8524a42f97768ce994519439515de3492ddc04d9\": container with ID starting with dc0fd9b5395f36d0f272530c8524a42f97768ce994519439515de3492ddc04d9 not found: ID does not exist" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.144124 4747 scope.go:117] "RemoveContainer" containerID="118356981f8b72fa7894fd3cbb54c65a90f743c401120741d228820dfc83a03c" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.148858 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"118356981f8b72fa7894fd3cbb54c65a90f743c401120741d228820dfc83a03c"} err="failed to get container status \"118356981f8b72fa7894fd3cbb54c65a90f743c401120741d228820dfc83a03c\": rpc error: code = NotFound desc = could not find container \"118356981f8b72fa7894fd3cbb54c65a90f743c401120741d228820dfc83a03c\": container with ID starting with 118356981f8b72fa7894fd3cbb54c65a90f743c401120741d228820dfc83a03c not found: ID does not exist" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.148898 4747 scope.go:117] "RemoveContainer" containerID="dc0fd9b5395f36d0f272530c8524a42f97768ce994519439515de3492ddc04d9" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.149573 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc0fd9b5395f36d0f272530c8524a42f97768ce994519439515de3492ddc04d9"} err="failed to get container status \"dc0fd9b5395f36d0f272530c8524a42f97768ce994519439515de3492ddc04d9\": rpc error: code = NotFound desc = could not find container \"dc0fd9b5395f36d0f272530c8524a42f97768ce994519439515de3492ddc04d9\": container with ID starting with dc0fd9b5395f36d0f272530c8524a42f97768ce994519439515de3492ddc04d9 not found: ID does not exist" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.155815 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 01 06:34:58 crc kubenswrapper[4747]: E1001 06:34:58.156131 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bf83f1e-1f35-4536-bfdb-c03ad69232fd" containerName="glance-httpd" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.156151 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bf83f1e-1f35-4536-bfdb-c03ad69232fd" containerName="glance-httpd" Oct 01 06:34:58 crc kubenswrapper[4747]: E1001 06:34:58.156185 4747 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="6bf83f1e-1f35-4536-bfdb-c03ad69232fd" containerName="glance-log" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.156194 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bf83f1e-1f35-4536-bfdb-c03ad69232fd" containerName="glance-log" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.156386 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bf83f1e-1f35-4536-bfdb-c03ad69232fd" containerName="glance-log" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.156426 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bf83f1e-1f35-4536-bfdb-c03ad69232fd" containerName="glance-httpd" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.157414 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.172796 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.195019 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-run\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.195057 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.195078 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-config-data\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.195106 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-logs\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.195125 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-lib-modules\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.195154 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-etc-nvme\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.195175 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8k2tr\" 
(UniqueName: \"kubernetes.io/projected/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-kube-api-access-8k2tr\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.195191 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-var-locks-brick\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.195252 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-dev\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.195281 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-scripts\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.195489 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-httpd-run\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.195612 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-etc-iscsi\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.195735 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.195824 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-sys\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299237 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-logs\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299295 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: 
\"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-lib-modules\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299335 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-etc-nvme\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299368 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8k2tr\" (UniqueName: \"kubernetes.io/projected/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-kube-api-access-8k2tr\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299392 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-var-locks-brick\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299420 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-dev\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299429 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-lib-modules\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299475 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-etc-nvme\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299451 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-scripts\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299560 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-var-locks-brick\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299569 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-dev\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " 
pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299657 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-httpd-run\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299696 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-etc-iscsi\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299720 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299769 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-etc-iscsi\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299788 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-sys\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299821 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-sys\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299770 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-logs\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299856 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-run\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299880 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299906 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-config-data\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.299945 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-run\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.300039 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") device mount path \"/mnt/openstack/pv02\"" pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.300225 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") device mount path \"/mnt/openstack/pv01\"" pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.300238 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-httpd-run\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.310567 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-config-data\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.320483 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-scripts\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.334410 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8k2tr\" (UniqueName: \"kubernetes.io/projected/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-kube-api-access-8k2tr\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.336826 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.345137 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"glance-default-single-1\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " 
pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.473386 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:34:58 crc kubenswrapper[4747]: I1001 06:34:58.748977 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 01 06:34:58 crc kubenswrapper[4747]: W1001 06:34:58.755158 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod472d162f_3bf2_4a8f_a0d3_e85f452f07f7.slice/crio-32b4143179d5fe4700c03708b78df4b21b5b6c41c31a54b8c3bae644bbdb1ef6 WatchSource:0}: Error finding container 32b4143179d5fe4700c03708b78df4b21b5b6c41c31a54b8c3bae644bbdb1ef6: Status 404 returned error can't find the container with id 32b4143179d5fe4700c03708b78df4b21b5b6c41c31a54b8c3bae644bbdb1ef6 Oct 01 06:34:59 crc kubenswrapper[4747]: I1001 06:34:59.115339 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"472d162f-3bf2-4a8f-a0d3-e85f452f07f7","Type":"ContainerStarted","Data":"7c5dcbebb946d5becfdfb05069ee77616a38999601dd758631f2beb699bd1df1"} Oct 01 06:34:59 crc kubenswrapper[4747]: I1001 06:34:59.115703 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"472d162f-3bf2-4a8f-a0d3-e85f452f07f7","Type":"ContainerStarted","Data":"2134368fbe4502166120232a8f72de91dc4a379c115c627e58e1556f8a423cef"} Oct 01 06:34:59 crc kubenswrapper[4747]: I1001 06:34:59.115725 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"472d162f-3bf2-4a8f-a0d3-e85f452f07f7","Type":"ContainerStarted","Data":"32b4143179d5fe4700c03708b78df4b21b5b6c41c31a54b8c3bae644bbdb1ef6"} Oct 01 06:34:59 crc kubenswrapper[4747]: I1001 06:34:59.145151 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-single-1" podStartSLOduration=1.145132176 podStartE2EDuration="1.145132176s" podCreationTimestamp="2025-10-01 06:34:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:34:59.139198191 +0000 UTC m=+1100.548855250" watchObservedRunningTime="2025-10-01 06:34:59.145132176 +0000 UTC m=+1100.554789235" Oct 01 06:34:59 crc kubenswrapper[4747]: I1001 06:34:59.293802 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bf83f1e-1f35-4536-bfdb-c03ad69232fd" path="/var/lib/kubelet/pods/6bf83f1e-1f35-4536-bfdb-c03ad69232fd/volumes" Oct 01 06:35:05 crc kubenswrapper[4747]: I1001 06:35:05.766420 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:05 crc kubenswrapper[4747]: I1001 06:35:05.767460 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:05 crc kubenswrapper[4747]: I1001 06:35:05.815453 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:05 crc kubenswrapper[4747]: I1001 06:35:05.843041 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:06 crc kubenswrapper[4747]: I1001 06:35:06.181313 4747 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:06 crc kubenswrapper[4747]: I1001 06:35:06.181723 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:08 crc kubenswrapper[4747]: I1001 06:35:08.196769 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 06:35:08 crc kubenswrapper[4747]: I1001 06:35:08.196988 4747 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 01 06:35:08 crc kubenswrapper[4747]: I1001 06:35:08.474189 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:35:08 crc kubenswrapper[4747]: I1001 06:35:08.474273 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:35:08 crc kubenswrapper[4747]: I1001 06:35:08.513892 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:35:08 crc kubenswrapper[4747]: I1001 06:35:08.543455 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:35:08 crc kubenswrapper[4747]: I1001 06:35:08.733336 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:08 crc kubenswrapper[4747]: I1001 06:35:08.734822 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:09 crc kubenswrapper[4747]: I1001 06:35:09.210646 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:35:09 crc kubenswrapper[4747]: I1001 06:35:09.210716 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:35:11 crc kubenswrapper[4747]: I1001 06:35:11.074376 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:35:11 crc kubenswrapper[4747]: I1001 06:35:11.091502 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:35:11 crc kubenswrapper[4747]: I1001 06:35:11.171666 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:35:11 crc kubenswrapper[4747]: I1001 06:35:11.227636 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-0" podUID="d0f89004-45a0-4fc9-9e0f-1455ed9e9299" containerName="glance-log" containerID="cri-o://0c20ef658361e7eed503dc9726b0257c81bd410d12e5700b6fae1ea17a60aa60" gracePeriod=30 Oct 01 06:35:11 crc kubenswrapper[4747]: I1001 06:35:11.227879 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-0" podUID="d0f89004-45a0-4fc9-9e0f-1455ed9e9299" containerName="glance-httpd" containerID="cri-o://d23faf4af9bf211185fa5f51515d2e6ae7033194645582c6c4b347dd9e1642ef" gracePeriod=30 Oct 01 06:35:12 crc kubenswrapper[4747]: I1001 06:35:12.239934 4747 generic.go:334] "Generic (PLEG): container finished" podID="d0f89004-45a0-4fc9-9e0f-1455ed9e9299" 
containerID="0c20ef658361e7eed503dc9726b0257c81bd410d12e5700b6fae1ea17a60aa60" exitCode=143 Oct 01 06:35:12 crc kubenswrapper[4747]: I1001 06:35:12.241584 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"d0f89004-45a0-4fc9-9e0f-1455ed9e9299","Type":"ContainerDied","Data":"0c20ef658361e7eed503dc9726b0257c81bd410d12e5700b6fae1ea17a60aa60"} Oct 01 06:35:14 crc kubenswrapper[4747]: I1001 06:35:14.851184 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:14 crc kubenswrapper[4747]: I1001 06:35:14.931654 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-config-data\") pod \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " Oct 01 06:35:14 crc kubenswrapper[4747]: I1001 06:35:14.931734 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-etc-iscsi\") pod \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " Oct 01 06:35:14 crc kubenswrapper[4747]: I1001 06:35:14.931853 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "d0f89004-45a0-4fc9-9e0f-1455ed9e9299" (UID: "d0f89004-45a0-4fc9-9e0f-1455ed9e9299"). InnerVolumeSpecName "etc-iscsi". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:35:14 crc kubenswrapper[4747]: I1001 06:35:14.932270 4747 reconciler_common.go:293] "Volume detached for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-etc-iscsi\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:14 crc kubenswrapper[4747]: I1001 06:35:14.974998 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-config-data" (OuterVolumeSpecName: "config-data") pod "d0f89004-45a0-4fc9-9e0f-1455ed9e9299" (UID: "d0f89004-45a0-4fc9-9e0f-1455ed9e9299"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.033090 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-dev\") pod \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.033144 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance-cache\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.033174 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.033203 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rf9mt\" (UniqueName: \"kubernetes.io/projected/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-kube-api-access-rf9mt\") pod \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.033205 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-dev" (OuterVolumeSpecName: "dev") pod "d0f89004-45a0-4fc9-9e0f-1455ed9e9299" (UID: "d0f89004-45a0-4fc9-9e0f-1455ed9e9299"). InnerVolumeSpecName "dev". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.033263 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-httpd-run\") pod \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.033334 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-scripts\") pod \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.033375 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-logs\") pod \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.033401 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-sys\") pod \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.033484 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-etc-nvme\") pod \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.033506 
4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-lib-modules\") pod \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.033525 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-var-locks-brick\") pod \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.033546 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-run\") pod \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\" (UID: \"d0f89004-45a0-4fc9-9e0f-1455ed9e9299\") " Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.033735 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d0f89004-45a0-4fc9-9e0f-1455ed9e9299" (UID: "d0f89004-45a0-4fc9-9e0f-1455ed9e9299"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.033812 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-run" (OuterVolumeSpecName: "run") pod "d0f89004-45a0-4fc9-9e0f-1455ed9e9299" (UID: "d0f89004-45a0-4fc9-9e0f-1455ed9e9299"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.034132 4747 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-run\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.034152 4747 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-dev\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.034163 4747 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.034178 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.036934 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-scripts" (OuterVolumeSpecName: "scripts") pod "d0f89004-45a0-4fc9-9e0f-1455ed9e9299" (UID: "d0f89004-45a0-4fc9-9e0f-1455ed9e9299"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.037369 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-logs" (OuterVolumeSpecName: "logs") pod "d0f89004-45a0-4fc9-9e0f-1455ed9e9299" (UID: "d0f89004-45a0-4fc9-9e0f-1455ed9e9299"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.037404 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-sys" (OuterVolumeSpecName: "sys") pod "d0f89004-45a0-4fc9-9e0f-1455ed9e9299" (UID: "d0f89004-45a0-4fc9-9e0f-1455ed9e9299"). InnerVolumeSpecName "sys". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.037420 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "d0f89004-45a0-4fc9-9e0f-1455ed9e9299" (UID: "d0f89004-45a0-4fc9-9e0f-1455ed9e9299"). InnerVolumeSpecName "etc-nvme". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.037435 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "d0f89004-45a0-4fc9-9e0f-1455ed9e9299" (UID: "d0f89004-45a0-4fc9-9e0f-1455ed9e9299"). InnerVolumeSpecName "lib-modules". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.037512 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "d0f89004-45a0-4fc9-9e0f-1455ed9e9299" (UID: "d0f89004-45a0-4fc9-9e0f-1455ed9e9299"). InnerVolumeSpecName "var-locks-brick". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.037721 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-kube-api-access-rf9mt" (OuterVolumeSpecName: "kube-api-access-rf9mt") pod "d0f89004-45a0-4fc9-9e0f-1455ed9e9299" (UID: "d0f89004-45a0-4fc9-9e0f-1455ed9e9299"). InnerVolumeSpecName "kube-api-access-rf9mt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.037838 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "d0f89004-45a0-4fc9-9e0f-1455ed9e9299" (UID: "d0f89004-45a0-4fc9-9e0f-1455ed9e9299"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.038190 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance-cache") pod "d0f89004-45a0-4fc9-9e0f-1455ed9e9299" (UID: "d0f89004-45a0-4fc9-9e0f-1455ed9e9299"). InnerVolumeSpecName "local-storage11-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.135591 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.135638 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-logs\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.135654 4747 reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-sys\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.135666 4747 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-etc-nvme\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.135679 4747 reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-lib-modules\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.135690 4747 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-var-locks-brick\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.135728 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.135745 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.135778 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rf9mt\" (UniqueName: \"kubernetes.io/projected/d0f89004-45a0-4fc9-9e0f-1455ed9e9299-kube-api-access-rf9mt\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.152733 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.161820 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.236698 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.236969 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.270031 4747 generic.go:334] "Generic (PLEG): container finished" podID="d0f89004-45a0-4fc9-9e0f-1455ed9e9299" 
containerID="d23faf4af9bf211185fa5f51515d2e6ae7033194645582c6c4b347dd9e1642ef" exitCode=0 Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.270090 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"d0f89004-45a0-4fc9-9e0f-1455ed9e9299","Type":"ContainerDied","Data":"d23faf4af9bf211185fa5f51515d2e6ae7033194645582c6c4b347dd9e1642ef"} Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.270130 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"d0f89004-45a0-4fc9-9e0f-1455ed9e9299","Type":"ContainerDied","Data":"86474dc23a1b07f4488ce8c50dad8ae9a5d91f909f0280fcf94d420beacf1a9f"} Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.270158 4747 scope.go:117] "RemoveContainer" containerID="d23faf4af9bf211185fa5f51515d2e6ae7033194645582c6c4b347dd9e1642ef" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.270321 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.334113 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.334481 4747 scope.go:117] "RemoveContainer" containerID="0c20ef658361e7eed503dc9726b0257c81bd410d12e5700b6fae1ea17a60aa60" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.353828 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.360972 4747 scope.go:117] "RemoveContainer" containerID="d23faf4af9bf211185fa5f51515d2e6ae7033194645582c6c4b347dd9e1642ef" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.362337 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:35:15 crc kubenswrapper[4747]: E1001 06:35:15.362949 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0f89004-45a0-4fc9-9e0f-1455ed9e9299" containerName="glance-log" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.362997 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0f89004-45a0-4fc9-9e0f-1455ed9e9299" containerName="glance-log" Oct 01 06:35:15 crc kubenswrapper[4747]: E1001 06:35:15.363035 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0f89004-45a0-4fc9-9e0f-1455ed9e9299" containerName="glance-httpd" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.363048 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0f89004-45a0-4fc9-9e0f-1455ed9e9299" containerName="glance-httpd" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.363298 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0f89004-45a0-4fc9-9e0f-1455ed9e9299" containerName="glance-log" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.363336 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0f89004-45a0-4fc9-9e0f-1455ed9e9299" containerName="glance-httpd" Oct 01 06:35:15 crc kubenswrapper[4747]: E1001 06:35:15.363795 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d23faf4af9bf211185fa5f51515d2e6ae7033194645582c6c4b347dd9e1642ef\": container with ID starting with d23faf4af9bf211185fa5f51515d2e6ae7033194645582c6c4b347dd9e1642ef not found: ID does not exist" 
containerID="d23faf4af9bf211185fa5f51515d2e6ae7033194645582c6c4b347dd9e1642ef" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.363832 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d23faf4af9bf211185fa5f51515d2e6ae7033194645582c6c4b347dd9e1642ef"} err="failed to get container status \"d23faf4af9bf211185fa5f51515d2e6ae7033194645582c6c4b347dd9e1642ef\": rpc error: code = NotFound desc = could not find container \"d23faf4af9bf211185fa5f51515d2e6ae7033194645582c6c4b347dd9e1642ef\": container with ID starting with d23faf4af9bf211185fa5f51515d2e6ae7033194645582c6c4b347dd9e1642ef not found: ID does not exist" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.363854 4747 scope.go:117] "RemoveContainer" containerID="0c20ef658361e7eed503dc9726b0257c81bd410d12e5700b6fae1ea17a60aa60" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.364781 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: E1001 06:35:15.364828 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c20ef658361e7eed503dc9726b0257c81bd410d12e5700b6fae1ea17a60aa60\": container with ID starting with 0c20ef658361e7eed503dc9726b0257c81bd410d12e5700b6fae1ea17a60aa60 not found: ID does not exist" containerID="0c20ef658361e7eed503dc9726b0257c81bd410d12e5700b6fae1ea17a60aa60" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.364850 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c20ef658361e7eed503dc9726b0257c81bd410d12e5700b6fae1ea17a60aa60"} err="failed to get container status \"0c20ef658361e7eed503dc9726b0257c81bd410d12e5700b6fae1ea17a60aa60\": rpc error: code = NotFound desc = could not find container \"0c20ef658361e7eed503dc9726b0257c81bd410d12e5700b6fae1ea17a60aa60\": container with ID starting with 0c20ef658361e7eed503dc9726b0257c81bd410d12e5700b6fae1ea17a60aa60 not found: ID does not exist" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.384696 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.542182 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.542292 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-etc-nvme\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.542348 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-config-data\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.542418 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.542457 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-httpd-run\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.542492 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-logs\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.542523 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-sys\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.542562 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-var-locks-brick\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.542612 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-lib-modules\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.542672 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plgtz\" (UniqueName: \"kubernetes.io/projected/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-kube-api-access-plgtz\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.542712 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-run\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.542852 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-etc-iscsi\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.542912 4747 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-scripts\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.542956 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-dev\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.644169 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plgtz\" (UniqueName: \"kubernetes.io/projected/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-kube-api-access-plgtz\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.644223 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-run\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.644271 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-etc-iscsi\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.644299 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-scripts\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.644324 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-dev\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.644347 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.644387 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-etc-nvme\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.644419 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-config-data\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.644457 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.644482 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-httpd-run\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.644501 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-logs\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.644521 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-sys\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.644542 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-var-locks-brick\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.644567 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-lib-modules\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.644860 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-lib-modules\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.644897 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-etc-nvme\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.644960 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-run\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" 
Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.644996 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-etc-iscsi\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.645302 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-dev\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.645829 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-sys\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.645999 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-var-locks-brick\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.646055 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-logs\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.646021 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") device mount path \"/mnt/openstack/pv11\"" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.646255 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") device mount path \"/mnt/openstack/pv10\"" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.646287 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-httpd-run\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.649914 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-config-data\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.663401 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-scripts\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.678936 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.683982 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plgtz\" (UniqueName: \"kubernetes.io/projected/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-kube-api-access-plgtz\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.694442 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-0\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:15 crc kubenswrapper[4747]: I1001 06:35:15.990479 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:16 crc kubenswrapper[4747]: I1001 06:35:16.286504 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:35:17 crc kubenswrapper[4747]: I1001 06:35:17.290648 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0f89004-45a0-4fc9-9e0f-1455ed9e9299" path="/var/lib/kubelet/pods/d0f89004-45a0-4fc9-9e0f-1455ed9e9299/volumes" Oct 01 06:35:17 crc kubenswrapper[4747]: I1001 06:35:17.300905 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"eb9a0ae4-d5fd-45ee-866f-68d159a8084c","Type":"ContainerStarted","Data":"f0daa476109dbda9ae5b4da22ae18caf1dd3cc976a74185527104e2075195c60"} Oct 01 06:35:17 crc kubenswrapper[4747]: I1001 06:35:17.300958 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"eb9a0ae4-d5fd-45ee-866f-68d159a8084c","Type":"ContainerStarted","Data":"d068f74862cd71fc0519396aad4f294b909e085a4a5af2a832ff63f2f8c93b58"} Oct 01 06:35:17 crc kubenswrapper[4747]: I1001 06:35:17.300979 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"eb9a0ae4-d5fd-45ee-866f-68d159a8084c","Type":"ContainerStarted","Data":"e2830bb3753d3abd4483ebc39e82b18be50660b1736660a4b8ca87ba1ef4b800"} Oct 01 06:35:25 crc kubenswrapper[4747]: I1001 06:35:25.991325 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:25 crc kubenswrapper[4747]: I1001 06:35:25.993115 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:26 crc kubenswrapper[4747]: I1001 06:35:26.032543 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:26 crc kubenswrapper[4747]: I1001 06:35:26.071518 4747 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="glance-kuttl-tests/glance-default-single-0" podStartSLOduration=11.071366051 podStartE2EDuration="11.071366051s" podCreationTimestamp="2025-10-01 06:35:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:35:17.347258946 +0000 UTC m=+1118.756916005" watchObservedRunningTime="2025-10-01 06:35:26.071366051 +0000 UTC m=+1127.481023100" Oct 01 06:35:26 crc kubenswrapper[4747]: I1001 06:35:26.091352 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:26 crc kubenswrapper[4747]: I1001 06:35:26.387662 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:26 crc kubenswrapper[4747]: I1001 06:35:26.387700 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:28 crc kubenswrapper[4747]: I1001 06:35:28.348314 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:28 crc kubenswrapper[4747]: I1001 06:35:28.352848 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:44 crc kubenswrapper[4747]: I1001 06:35:44.974321 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-db-sync-9mm6j"] Oct 01 06:35:44 crc kubenswrapper[4747]: I1001 06:35:44.984181 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-db-sync-9mm6j"] Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.046218 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.046484 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-1" podUID="472d162f-3bf2-4a8f-a0d3-e85f452f07f7" containerName="glance-log" containerID="cri-o://2134368fbe4502166120232a8f72de91dc4a379c115c627e58e1556f8a423cef" gracePeriod=30 Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.046578 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-1" podUID="472d162f-3bf2-4a8f-a0d3-e85f452f07f7" containerName="glance-httpd" containerID="cri-o://7c5dcbebb946d5becfdfb05069ee77616a38999601dd758631f2beb699bd1df1" gracePeriod=30 Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.058194 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.058521 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-0" podUID="eb9a0ae4-d5fd-45ee-866f-68d159a8084c" containerName="glance-log" containerID="cri-o://d068f74862cd71fc0519396aad4f294b909e085a4a5af2a832ff63f2f8c93b58" gracePeriod=30 Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.058732 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-0" podUID="eb9a0ae4-d5fd-45ee-866f-68d159a8084c" containerName="glance-httpd" containerID="cri-o://f0daa476109dbda9ae5b4da22ae18caf1dd3cc976a74185527104e2075195c60" gracePeriod=30 Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 
06:35:45.097829 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance2c77-account-delete-cp2gf"] Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.098906 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance2c77-account-delete-cp2gf" Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.110725 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance2c77-account-delete-cp2gf"] Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.123956 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-db-create-7spvf"] Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.132822 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-db-create-7spvf"] Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.142068 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-2c77-account-create-9t2bh"] Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.147866 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-2c77-account-create-9t2bh"] Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.160379 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance2c77-account-delete-cp2gf"] Oct 01 06:35:45 crc kubenswrapper[4747]: E1001 06:35:45.161232 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-jqkdq], unattached volumes=[], failed to process volumes=[]: context canceled" pod="glance-kuttl-tests/glance2c77-account-delete-cp2gf" podUID="6b78b4e8-ee46-4201-a2e2-43e211d82c6d" Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.175363 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/openstackclient"] Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.175551 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/openstackclient" podUID="7bed4500-d8df-453f-93b4-f2d093d81138" containerName="openstackclient" containerID="cri-o://5ead492a2a2b059bd7da20b7ee096a5c9b6f117b8bfff6475b22c1167fe075cd" gracePeriod=30 Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.234618 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqkdq\" (UniqueName: \"kubernetes.io/projected/6b78b4e8-ee46-4201-a2e2-43e211d82c6d-kube-api-access-jqkdq\") pod \"glance2c77-account-delete-cp2gf\" (UID: \"6b78b4e8-ee46-4201-a2e2-43e211d82c6d\") " pod="glance-kuttl-tests/glance2c77-account-delete-cp2gf" Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.286064 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3e96b76-cf58-4b94-9e89-5006581983b5" path="/var/lib/kubelet/pods/a3e96b76-cf58-4b94-9e89-5006581983b5/volumes" Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.286943 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="befef3e0-16ac-4eaa-ad04-f207aee8c495" path="/var/lib/kubelet/pods/befef3e0-16ac-4eaa-ad04-f207aee8c495/volumes" Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.287717 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="edddd2e6-c3e8-47c9-bdba-43548ef70d0c" path="/var/lib/kubelet/pods/edddd2e6-c3e8-47c9-bdba-43548ef70d0c/volumes" Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.336532 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-jqkdq\" (UniqueName: \"kubernetes.io/projected/6b78b4e8-ee46-4201-a2e2-43e211d82c6d-kube-api-access-jqkdq\") pod \"glance2c77-account-delete-cp2gf\" (UID: \"6b78b4e8-ee46-4201-a2e2-43e211d82c6d\") " pod="glance-kuttl-tests/glance2c77-account-delete-cp2gf" Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.368787 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqkdq\" (UniqueName: \"kubernetes.io/projected/6b78b4e8-ee46-4201-a2e2-43e211d82c6d-kube-api-access-jqkdq\") pod \"glance2c77-account-delete-cp2gf\" (UID: \"6b78b4e8-ee46-4201-a2e2-43e211d82c6d\") " pod="glance-kuttl-tests/glance2c77-account-delete-cp2gf" Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.575137 4747 generic.go:334] "Generic (PLEG): container finished" podID="eb9a0ae4-d5fd-45ee-866f-68d159a8084c" containerID="d068f74862cd71fc0519396aad4f294b909e085a4a5af2a832ff63f2f8c93b58" exitCode=143 Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.575215 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"eb9a0ae4-d5fd-45ee-866f-68d159a8084c","Type":"ContainerDied","Data":"d068f74862cd71fc0519396aad4f294b909e085a4a5af2a832ff63f2f8c93b58"} Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.576693 4747 generic.go:334] "Generic (PLEG): container finished" podID="472d162f-3bf2-4a8f-a0d3-e85f452f07f7" containerID="2134368fbe4502166120232a8f72de91dc4a379c115c627e58e1556f8a423cef" exitCode=143 Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.576792 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"472d162f-3bf2-4a8f-a0d3-e85f452f07f7","Type":"ContainerDied","Data":"2134368fbe4502166120232a8f72de91dc4a379c115c627e58e1556f8a423cef"} Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.581106 4747 generic.go:334] "Generic (PLEG): container finished" podID="7bed4500-d8df-453f-93b4-f2d093d81138" containerID="5ead492a2a2b059bd7da20b7ee096a5c9b6f117b8bfff6475b22c1167fe075cd" exitCode=143 Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.581181 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance2c77-account-delete-cp2gf" Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.581731 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstackclient" event={"ID":"7bed4500-d8df-453f-93b4-f2d093d81138","Type":"ContainerDied","Data":"5ead492a2a2b059bd7da20b7ee096a5c9b6f117b8bfff6475b22c1167fe075cd"} Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.581783 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstackclient" event={"ID":"7bed4500-d8df-453f-93b4-f2d093d81138","Type":"ContainerDied","Data":"a1fa009e391a114033e2f392b9578be6d3f5f0df13c433b0a5935d0d34e26916"} Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.581798 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1fa009e391a114033e2f392b9578be6d3f5f0df13c433b0a5935d0d34e26916" Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.583060 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstackclient" Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.587828 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance2c77-account-delete-cp2gf" Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.741429 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7bed4500-d8df-453f-93b4-f2d093d81138-openstack-config-secret\") pod \"7bed4500-d8df-453f-93b4-f2d093d81138\" (UID: \"7bed4500-d8df-453f-93b4-f2d093d81138\") " Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.741531 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cr569\" (UniqueName: \"kubernetes.io/projected/7bed4500-d8df-453f-93b4-f2d093d81138-kube-api-access-cr569\") pod \"7bed4500-d8df-453f-93b4-f2d093d81138\" (UID: \"7bed4500-d8df-453f-93b4-f2d093d81138\") " Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.741574 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7bed4500-d8df-453f-93b4-f2d093d81138-openstack-config\") pod \"7bed4500-d8df-453f-93b4-f2d093d81138\" (UID: \"7bed4500-d8df-453f-93b4-f2d093d81138\") " Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.741721 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/7bed4500-d8df-453f-93b4-f2d093d81138-openstack-scripts\") pod \"7bed4500-d8df-453f-93b4-f2d093d81138\" (UID: \"7bed4500-d8df-453f-93b4-f2d093d81138\") " Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.741846 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqkdq\" (UniqueName: \"kubernetes.io/projected/6b78b4e8-ee46-4201-a2e2-43e211d82c6d-kube-api-access-jqkdq\") pod \"6b78b4e8-ee46-4201-a2e2-43e211d82c6d\" (UID: \"6b78b4e8-ee46-4201-a2e2-43e211d82c6d\") " Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.742196 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bed4500-d8df-453f-93b4-f2d093d81138-openstack-scripts" (OuterVolumeSpecName: "openstack-scripts") pod "7bed4500-d8df-453f-93b4-f2d093d81138" (UID: "7bed4500-d8df-453f-93b4-f2d093d81138"). InnerVolumeSpecName "openstack-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.742331 4747 reconciler_common.go:293] "Volume detached for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/7bed4500-d8df-453f-93b4-f2d093d81138-openstack-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.745129 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bed4500-d8df-453f-93b4-f2d093d81138-kube-api-access-cr569" (OuterVolumeSpecName: "kube-api-access-cr569") pod "7bed4500-d8df-453f-93b4-f2d093d81138" (UID: "7bed4500-d8df-453f-93b4-f2d093d81138"). InnerVolumeSpecName "kube-api-access-cr569". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.748418 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b78b4e8-ee46-4201-a2e2-43e211d82c6d-kube-api-access-jqkdq" (OuterVolumeSpecName: "kube-api-access-jqkdq") pod "6b78b4e8-ee46-4201-a2e2-43e211d82c6d" (UID: "6b78b4e8-ee46-4201-a2e2-43e211d82c6d"). InnerVolumeSpecName "kube-api-access-jqkdq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.766711 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bed4500-d8df-453f-93b4-f2d093d81138-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "7bed4500-d8df-453f-93b4-f2d093d81138" (UID: "7bed4500-d8df-453f-93b4-f2d093d81138"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.776502 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bed4500-d8df-453f-93b4-f2d093d81138-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "7bed4500-d8df-453f-93b4-f2d093d81138" (UID: "7bed4500-d8df-453f-93b4-f2d093d81138"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.843875 4747 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7bed4500-d8df-453f-93b4-f2d093d81138-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.843912 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cr569\" (UniqueName: \"kubernetes.io/projected/7bed4500-d8df-453f-93b4-f2d093d81138-kube-api-access-cr569\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.843924 4747 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7bed4500-d8df-453f-93b4-f2d093d81138-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:45 crc kubenswrapper[4747]: I1001 06:35:45.843936 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqkdq\" (UniqueName: \"kubernetes.io/projected/6b78b4e8-ee46-4201-a2e2-43e211d82c6d-kube-api-access-jqkdq\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:46 crc kubenswrapper[4747]: I1001 06:35:46.588643 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance2c77-account-delete-cp2gf" Oct 01 06:35:46 crc kubenswrapper[4747]: I1001 06:35:46.588676 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/openstackclient" Oct 01 06:35:46 crc kubenswrapper[4747]: I1001 06:35:46.645744 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance2c77-account-delete-cp2gf"] Oct 01 06:35:46 crc kubenswrapper[4747]: I1001 06:35:46.652212 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance2c77-account-delete-cp2gf"] Oct 01 06:35:46 crc kubenswrapper[4747]: I1001 06:35:46.673384 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/openstackclient"] Oct 01 06:35:46 crc kubenswrapper[4747]: I1001 06:35:46.679398 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/openstackclient"] Oct 01 06:35:47 crc kubenswrapper[4747]: I1001 06:35:47.292956 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b78b4e8-ee46-4201-a2e2-43e211d82c6d" path="/var/lib/kubelet/pods/6b78b4e8-ee46-4201-a2e2-43e211d82c6d/volumes" Oct 01 06:35:47 crc kubenswrapper[4747]: I1001 06:35:47.293826 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bed4500-d8df-453f-93b4-f2d093d81138" path="/var/lib/kubelet/pods/7bed4500-d8df-453f-93b4-f2d093d81138/volumes" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.261474 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="glance-kuttl-tests/glance-default-single-0" podUID="eb9a0ae4-d5fd-45ee-866f-68d159a8084c" containerName="glance-log" probeResult="failure" output="Get \"http://10.217.0.108:9292/healthcheck\": read tcp 10.217.0.2:45410->10.217.0.108:9292: read: connection reset by peer" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.262030 4747 prober.go:107] "Probe failed" probeType="Readiness" pod="glance-kuttl-tests/glance-default-single-0" podUID="eb9a0ae4-d5fd-45ee-866f-68d159a8084c" containerName="glance-httpd" probeResult="failure" output="Get \"http://10.217.0.108:9292/healthcheck\": read tcp 10.217.0.2:45414->10.217.0.108:9292: read: connection reset by peer" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.615359 4747 generic.go:334] "Generic (PLEG): container finished" podID="eb9a0ae4-d5fd-45ee-866f-68d159a8084c" containerID="f0daa476109dbda9ae5b4da22ae18caf1dd3cc976a74185527104e2075195c60" exitCode=0 Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.615403 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"eb9a0ae4-d5fd-45ee-866f-68d159a8084c","Type":"ContainerDied","Data":"f0daa476109dbda9ae5b4da22ae18caf1dd3cc976a74185527104e2075195c60"} Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.617388 4747 generic.go:334] "Generic (PLEG): container finished" podID="472d162f-3bf2-4a8f-a0d3-e85f452f07f7" containerID="7c5dcbebb946d5becfdfb05069ee77616a38999601dd758631f2beb699bd1df1" exitCode=0 Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.617424 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"472d162f-3bf2-4a8f-a0d3-e85f452f07f7","Type":"ContainerDied","Data":"7c5dcbebb946d5becfdfb05069ee77616a38999601dd758631f2beb699bd1df1"} Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.617438 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-1" event={"ID":"472d162f-3bf2-4a8f-a0d3-e85f452f07f7","Type":"ContainerDied","Data":"32b4143179d5fe4700c03708b78df4b21b5b6c41c31a54b8c3bae644bbdb1ef6"} Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.617449 
4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="32b4143179d5fe4700c03708b78df4b21b5b6c41c31a54b8c3bae644bbdb1ef6" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.668572 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.673201 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795308 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-run\") pod \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795353 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-scripts\") pod \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795370 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-config-data\") pod \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795400 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-logs\") pod \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795422 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-scripts\") pod \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795436 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-etc-iscsi\") pod \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795452 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-httpd-run\") pod \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795472 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-lib-modules\") pod \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795494 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-logs\") pod \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\" (UID: 
\"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795513 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-lib-modules\") pod \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795532 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-plgtz\" (UniqueName: \"kubernetes.io/projected/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-kube-api-access-plgtz\") pod \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795560 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance-cache\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795578 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795591 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-config-data\") pod \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795603 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-run\") pod \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795617 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-sys\") pod \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795631 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-httpd-run\") pod \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795655 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-etc-nvme\") pod \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795669 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795681 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"sys\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-sys\") pod \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795900 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-dev\") pod \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795919 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-dev\") pod \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795934 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-var-locks-brick\") pod \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795955 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8k2tr\" (UniqueName: \"kubernetes.io/projected/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-kube-api-access-8k2tr\") pod \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795980 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-etc-iscsi\") pod \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\" (UID: \"eb9a0ae4-d5fd-45ee-866f-68d159a8084c\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.795997 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-var-locks-brick\") pod \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796021 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-etc-nvme\") pod \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796049 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance-cache\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\" (UID: \"472d162f-3bf2-4a8f-a0d3-e85f452f07f7\") " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796180 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "472d162f-3bf2-4a8f-a0d3-e85f452f07f7" (UID: "472d162f-3bf2-4a8f-a0d3-e85f452f07f7"). InnerVolumeSpecName "var-locks-brick". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796238 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "eb9a0ae4-d5fd-45ee-866f-68d159a8084c" (UID: "eb9a0ae4-d5fd-45ee-866f-68d159a8084c"). InnerVolumeSpecName "etc-iscsi". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796292 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "472d162f-3bf2-4a8f-a0d3-e85f452f07f7" (UID: "472d162f-3bf2-4a8f-a0d3-e85f452f07f7"). InnerVolumeSpecName "etc-nvme". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796343 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-dev" (OuterVolumeSpecName: "dev") pod "eb9a0ae4-d5fd-45ee-866f-68d159a8084c" (UID: "eb9a0ae4-d5fd-45ee-866f-68d159a8084c"). InnerVolumeSpecName "dev". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796408 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-sys" (OuterVolumeSpecName: "sys") pod "472d162f-3bf2-4a8f-a0d3-e85f452f07f7" (UID: "472d162f-3bf2-4a8f-a0d3-e85f452f07f7"). InnerVolumeSpecName "sys". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796442 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-run" (OuterVolumeSpecName: "run") pod "eb9a0ae4-d5fd-45ee-866f-68d159a8084c" (UID: "eb9a0ae4-d5fd-45ee-866f-68d159a8084c"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796473 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "eb9a0ae4-d5fd-45ee-866f-68d159a8084c" (UID: "eb9a0ae4-d5fd-45ee-866f-68d159a8084c"). InnerVolumeSpecName "var-locks-brick". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796507 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "eb9a0ae4-d5fd-45ee-866f-68d159a8084c" (UID: "eb9a0ae4-d5fd-45ee-866f-68d159a8084c"). InnerVolumeSpecName "etc-nvme". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796535 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-sys" (OuterVolumeSpecName: "sys") pod "eb9a0ae4-d5fd-45ee-866f-68d159a8084c" (UID: "eb9a0ae4-d5fd-45ee-866f-68d159a8084c"). InnerVolumeSpecName "sys". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796660 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "eb9a0ae4-d5fd-45ee-866f-68d159a8084c" (UID: "eb9a0ae4-d5fd-45ee-866f-68d159a8084c"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796743 4747 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-var-locks-brick\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796794 4747 reconciler_common.go:293] "Volume detached for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-etc-iscsi\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796812 4747 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-var-locks-brick\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796842 4747 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-etc-nvme\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796860 4747 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-run\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796875 4747 reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-sys\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796889 4747 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-etc-nvme\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796905 4747 reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-sys\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796918 4747 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-dev\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796806 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "eb9a0ae4-d5fd-45ee-866f-68d159a8084c" (UID: "eb9a0ae4-d5fd-45ee-866f-68d159a8084c"). InnerVolumeSpecName "lib-modules". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796863 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "472d162f-3bf2-4a8f-a0d3-e85f452f07f7" (UID: "472d162f-3bf2-4a8f-a0d3-e85f452f07f7"). 
InnerVolumeSpecName "lib-modules". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.796881 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-dev" (OuterVolumeSpecName: "dev") pod "472d162f-3bf2-4a8f-a0d3-e85f452f07f7" (UID: "472d162f-3bf2-4a8f-a0d3-e85f452f07f7"). InnerVolumeSpecName "dev". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.797175 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "472d162f-3bf2-4a8f-a0d3-e85f452f07f7" (UID: "472d162f-3bf2-4a8f-a0d3-e85f452f07f7"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.797237 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "472d162f-3bf2-4a8f-a0d3-e85f452f07f7" (UID: "472d162f-3bf2-4a8f-a0d3-e85f452f07f7"). InnerVolumeSpecName "etc-iscsi". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.797528 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-logs" (OuterVolumeSpecName: "logs") pod "eb9a0ae4-d5fd-45ee-866f-68d159a8084c" (UID: "eb9a0ae4-d5fd-45ee-866f-68d159a8084c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.797627 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-run" (OuterVolumeSpecName: "run") pod "472d162f-3bf2-4a8f-a0d3-e85f452f07f7" (UID: "472d162f-3bf2-4a8f-a0d3-e85f452f07f7"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.799452 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-logs" (OuterVolumeSpecName: "logs") pod "472d162f-3bf2-4a8f-a0d3-e85f452f07f7" (UID: "472d162f-3bf2-4a8f-a0d3-e85f452f07f7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.801190 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-kube-api-access-8k2tr" (OuterVolumeSpecName: "kube-api-access-8k2tr") pod "472d162f-3bf2-4a8f-a0d3-e85f452f07f7" (UID: "472d162f-3bf2-4a8f-a0d3-e85f452f07f7"). InnerVolumeSpecName "kube-api-access-8k2tr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.801552 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "glance") pod "472d162f-3bf2-4a8f-a0d3-e85f452f07f7" (UID: "472d162f-3bf2-4a8f-a0d3-e85f452f07f7"). InnerVolumeSpecName "local-storage02-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.801979 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance-cache") pod "eb9a0ae4-d5fd-45ee-866f-68d159a8084c" (UID: "eb9a0ae4-d5fd-45ee-866f-68d159a8084c"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.802231 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-kube-api-access-plgtz" (OuterVolumeSpecName: "kube-api-access-plgtz") pod "eb9a0ae4-d5fd-45ee-866f-68d159a8084c" (UID: "eb9a0ae4-d5fd-45ee-866f-68d159a8084c"). InnerVolumeSpecName "kube-api-access-plgtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.802248 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-scripts" (OuterVolumeSpecName: "scripts") pod "472d162f-3bf2-4a8f-a0d3-e85f452f07f7" (UID: "472d162f-3bf2-4a8f-a0d3-e85f452f07f7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.802509 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "eb9a0ae4-d5fd-45ee-866f-68d159a8084c" (UID: "eb9a0ae4-d5fd-45ee-866f-68d159a8084c"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.807850 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance-cache") pod "472d162f-3bf2-4a8f-a0d3-e85f452f07f7" (UID: "472d162f-3bf2-4a8f-a0d3-e85f452f07f7"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.808829 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-scripts" (OuterVolumeSpecName: "scripts") pod "eb9a0ae4-d5fd-45ee-866f-68d159a8084c" (UID: "eb9a0ae4-d5fd-45ee-866f-68d159a8084c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.851701 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-config-data" (OuterVolumeSpecName: "config-data") pod "472d162f-3bf2-4a8f-a0d3-e85f452f07f7" (UID: "472d162f-3bf2-4a8f-a0d3-e85f452f07f7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.854086 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-config-data" (OuterVolumeSpecName: "config-data") pod "eb9a0ae4-d5fd-45ee-866f-68d159a8084c" (UID: "eb9a0ae4-d5fd-45ee-866f-68d159a8084c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.898158 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.898217 4747 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-run\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.898241 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.898255 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.898269 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-logs\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.898281 4747 reconciler_common.go:293] "Volume detached for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-etc-iscsi\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.898292 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.898303 4747 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.898315 4747 reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-lib-modules\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.898331 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-logs\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.898342 4747 reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-lib-modules\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.898356 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-plgtz\" (UniqueName: \"kubernetes.io/projected/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-kube-api-access-plgtz\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.898379 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.898392 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.898410 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.898422 4747 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eb9a0ae4-d5fd-45ee-866f-68d159a8084c-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.898438 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.898451 4747 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-dev\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.898463 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8k2tr\" (UniqueName: \"kubernetes.io/projected/472d162f-3bf2-4a8f-a0d3-e85f452f07f7-kube-api-access-8k2tr\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.911973 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.914944 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.922502 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.922902 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.999850 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.999897 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:48 crc kubenswrapper[4747]: I1001 06:35:48.999914 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:49 crc kubenswrapper[4747]: I1001 06:35:48.999930 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:49 crc kubenswrapper[4747]: I1001 06:35:49.630422 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-1" Oct 01 06:35:49 crc kubenswrapper[4747]: I1001 06:35:49.631588 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:35:49 crc kubenswrapper[4747]: I1001 06:35:49.632424 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"eb9a0ae4-d5fd-45ee-866f-68d159a8084c","Type":"ContainerDied","Data":"e2830bb3753d3abd4483ebc39e82b18be50660b1736660a4b8ca87ba1ef4b800"} Oct 01 06:35:49 crc kubenswrapper[4747]: I1001 06:35:49.632490 4747 scope.go:117] "RemoveContainer" containerID="f0daa476109dbda9ae5b4da22ae18caf1dd3cc976a74185527104e2075195c60" Oct 01 06:35:49 crc kubenswrapper[4747]: I1001 06:35:49.665398 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 01 06:35:49 crc kubenswrapper[4747]: I1001 06:35:49.674746 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-default-single-1"] Oct 01 06:35:49 crc kubenswrapper[4747]: I1001 06:35:49.681773 4747 scope.go:117] "RemoveContainer" containerID="d068f74862cd71fc0519396aad4f294b909e085a4a5af2a832ff63f2f8c93b58" Oct 01 06:35:49 crc kubenswrapper[4747]: I1001 06:35:49.685138 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:35:49 crc kubenswrapper[4747]: I1001 06:35:49.692051 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:35:50 crc kubenswrapper[4747]: I1001 06:35:50.540176 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-db-create-26qdf"] Oct 01 06:35:50 crc kubenswrapper[4747]: E1001 06:35:50.541023 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb9a0ae4-d5fd-45ee-866f-68d159a8084c" containerName="glance-log" Oct 01 06:35:50 crc kubenswrapper[4747]: I1001 06:35:50.541053 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb9a0ae4-d5fd-45ee-866f-68d159a8084c" containerName="glance-log" Oct 01 06:35:50 crc kubenswrapper[4747]: E1001 06:35:50.541073 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="472d162f-3bf2-4a8f-a0d3-e85f452f07f7" containerName="glance-log" Oct 01 06:35:50 crc kubenswrapper[4747]: I1001 06:35:50.541086 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="472d162f-3bf2-4a8f-a0d3-e85f452f07f7" containerName="glance-log" Oct 01 06:35:50 crc kubenswrapper[4747]: E1001 06:35:50.541116 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="472d162f-3bf2-4a8f-a0d3-e85f452f07f7" containerName="glance-httpd" Oct 01 06:35:50 crc kubenswrapper[4747]: I1001 06:35:50.541128 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="472d162f-3bf2-4a8f-a0d3-e85f452f07f7" containerName="glance-httpd" Oct 01 06:35:50 crc kubenswrapper[4747]: E1001 06:35:50.541147 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb9a0ae4-d5fd-45ee-866f-68d159a8084c" containerName="glance-httpd" Oct 01 06:35:50 crc kubenswrapper[4747]: I1001 06:35:50.541159 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb9a0ae4-d5fd-45ee-866f-68d159a8084c" containerName="glance-httpd" Oct 01 06:35:50 crc kubenswrapper[4747]: E1001 06:35:50.541182 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bed4500-d8df-453f-93b4-f2d093d81138" containerName="openstackclient" Oct 01 06:35:50 
crc kubenswrapper[4747]: I1001 06:35:50.541194 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bed4500-d8df-453f-93b4-f2d093d81138" containerName="openstackclient" Oct 01 06:35:50 crc kubenswrapper[4747]: I1001 06:35:50.541477 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="472d162f-3bf2-4a8f-a0d3-e85f452f07f7" containerName="glance-log" Oct 01 06:35:50 crc kubenswrapper[4747]: I1001 06:35:50.541498 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb9a0ae4-d5fd-45ee-866f-68d159a8084c" containerName="glance-log" Oct 01 06:35:50 crc kubenswrapper[4747]: I1001 06:35:50.541516 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="472d162f-3bf2-4a8f-a0d3-e85f452f07f7" containerName="glance-httpd" Oct 01 06:35:50 crc kubenswrapper[4747]: I1001 06:35:50.541540 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb9a0ae4-d5fd-45ee-866f-68d159a8084c" containerName="glance-httpd" Oct 01 06:35:50 crc kubenswrapper[4747]: I1001 06:35:50.541555 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bed4500-d8df-453f-93b4-f2d093d81138" containerName="openstackclient" Oct 01 06:35:50 crc kubenswrapper[4747]: I1001 06:35:50.542355 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-create-26qdf" Oct 01 06:35:50 crc kubenswrapper[4747]: I1001 06:35:50.556142 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-create-26qdf"] Oct 01 06:35:50 crc kubenswrapper[4747]: I1001 06:35:50.731070 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-466zr\" (UniqueName: \"kubernetes.io/projected/cae16d1f-28e0-4627-9787-f49b9cf01e89-kube-api-access-466zr\") pod \"glance-db-create-26qdf\" (UID: \"cae16d1f-28e0-4627-9787-f49b9cf01e89\") " pod="glance-kuttl-tests/glance-db-create-26qdf" Oct 01 06:35:50 crc kubenswrapper[4747]: I1001 06:35:50.832824 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-466zr\" (UniqueName: \"kubernetes.io/projected/cae16d1f-28e0-4627-9787-f49b9cf01e89-kube-api-access-466zr\") pod \"glance-db-create-26qdf\" (UID: \"cae16d1f-28e0-4627-9787-f49b9cf01e89\") " pod="glance-kuttl-tests/glance-db-create-26qdf" Oct 01 06:35:50 crc kubenswrapper[4747]: I1001 06:35:50.870737 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-466zr\" (UniqueName: \"kubernetes.io/projected/cae16d1f-28e0-4627-9787-f49b9cf01e89-kube-api-access-466zr\") pod \"glance-db-create-26qdf\" (UID: \"cae16d1f-28e0-4627-9787-f49b9cf01e89\") " pod="glance-kuttl-tests/glance-db-create-26qdf" Oct 01 06:35:50 crc kubenswrapper[4747]: I1001 06:35:50.871977 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-create-26qdf" Oct 01 06:35:51 crc kubenswrapper[4747]: I1001 06:35:51.296540 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="472d162f-3bf2-4a8f-a0d3-e85f452f07f7" path="/var/lib/kubelet/pods/472d162f-3bf2-4a8f-a0d3-e85f452f07f7/volumes" Oct 01 06:35:51 crc kubenswrapper[4747]: I1001 06:35:51.299485 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb9a0ae4-d5fd-45ee-866f-68d159a8084c" path="/var/lib/kubelet/pods/eb9a0ae4-d5fd-45ee-866f-68d159a8084c/volumes" Oct 01 06:35:51 crc kubenswrapper[4747]: I1001 06:35:51.447062 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-create-26qdf"] Oct 01 06:35:51 crc kubenswrapper[4747]: I1001 06:35:51.655931 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-26qdf" event={"ID":"cae16d1f-28e0-4627-9787-f49b9cf01e89","Type":"ContainerStarted","Data":"04d54723e0c51bbae9146140a89a5c7a59c1e3110b67700e4f543f5fb9fa6ae8"} Oct 01 06:35:52 crc kubenswrapper[4747]: I1001 06:35:52.669815 4747 generic.go:334] "Generic (PLEG): container finished" podID="cae16d1f-28e0-4627-9787-f49b9cf01e89" containerID="e28bf6359fd12613cd7231a72a0f064b6ee2c120ba6b6d54eb56b88bad74a820" exitCode=0 Oct 01 06:35:52 crc kubenswrapper[4747]: I1001 06:35:52.669956 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-26qdf" event={"ID":"cae16d1f-28e0-4627-9787-f49b9cf01e89","Type":"ContainerDied","Data":"e28bf6359fd12613cd7231a72a0f064b6ee2c120ba6b6d54eb56b88bad74a820"} Oct 01 06:35:54 crc kubenswrapper[4747]: I1001 06:35:54.089171 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-create-26qdf" Oct 01 06:35:54 crc kubenswrapper[4747]: I1001 06:35:54.197016 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-466zr\" (UniqueName: \"kubernetes.io/projected/cae16d1f-28e0-4627-9787-f49b9cf01e89-kube-api-access-466zr\") pod \"cae16d1f-28e0-4627-9787-f49b9cf01e89\" (UID: \"cae16d1f-28e0-4627-9787-f49b9cf01e89\") " Oct 01 06:35:54 crc kubenswrapper[4747]: I1001 06:35:54.203146 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cae16d1f-28e0-4627-9787-f49b9cf01e89-kube-api-access-466zr" (OuterVolumeSpecName: "kube-api-access-466zr") pod "cae16d1f-28e0-4627-9787-f49b9cf01e89" (UID: "cae16d1f-28e0-4627-9787-f49b9cf01e89"). InnerVolumeSpecName "kube-api-access-466zr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:35:54 crc kubenswrapper[4747]: I1001 06:35:54.298652 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-466zr\" (UniqueName: \"kubernetes.io/projected/cae16d1f-28e0-4627-9787-f49b9cf01e89-kube-api-access-466zr\") on node \"crc\" DevicePath \"\"" Oct 01 06:35:54 crc kubenswrapper[4747]: I1001 06:35:54.692507 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-26qdf" event={"ID":"cae16d1f-28e0-4627-9787-f49b9cf01e89","Type":"ContainerDied","Data":"04d54723e0c51bbae9146140a89a5c7a59c1e3110b67700e4f543f5fb9fa6ae8"} Oct 01 06:35:54 crc kubenswrapper[4747]: I1001 06:35:54.692563 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="04d54723e0c51bbae9146140a89a5c7a59c1e3110b67700e4f543f5fb9fa6ae8" Oct 01 06:35:54 crc kubenswrapper[4747]: I1001 06:35:54.692661 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-create-26qdf" Oct 01 06:36:00 crc kubenswrapper[4747]: I1001 06:36:00.586287 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-12b5-account-create-7gkdh"] Oct 01 06:36:00 crc kubenswrapper[4747]: E1001 06:36:00.587690 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cae16d1f-28e0-4627-9787-f49b9cf01e89" containerName="mariadb-database-create" Oct 01 06:36:00 crc kubenswrapper[4747]: I1001 06:36:00.587724 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="cae16d1f-28e0-4627-9787-f49b9cf01e89" containerName="mariadb-database-create" Oct 01 06:36:00 crc kubenswrapper[4747]: I1001 06:36:00.588119 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="cae16d1f-28e0-4627-9787-f49b9cf01e89" containerName="mariadb-database-create" Oct 01 06:36:00 crc kubenswrapper[4747]: I1001 06:36:00.589229 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-12b5-account-create-7gkdh" Oct 01 06:36:00 crc kubenswrapper[4747]: I1001 06:36:00.593483 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-db-secret" Oct 01 06:36:00 crc kubenswrapper[4747]: I1001 06:36:00.601329 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-12b5-account-create-7gkdh"] Oct 01 06:36:00 crc kubenswrapper[4747]: I1001 06:36:00.705537 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2c26\" (UniqueName: \"kubernetes.io/projected/e9a61e73-ac28-49eb-9d44-8a05f8147bbc-kube-api-access-z2c26\") pod \"glance-12b5-account-create-7gkdh\" (UID: \"e9a61e73-ac28-49eb-9d44-8a05f8147bbc\") " pod="glance-kuttl-tests/glance-12b5-account-create-7gkdh" Oct 01 06:36:00 crc kubenswrapper[4747]: I1001 06:36:00.807291 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2c26\" (UniqueName: \"kubernetes.io/projected/e9a61e73-ac28-49eb-9d44-8a05f8147bbc-kube-api-access-z2c26\") pod \"glance-12b5-account-create-7gkdh\" (UID: \"e9a61e73-ac28-49eb-9d44-8a05f8147bbc\") " pod="glance-kuttl-tests/glance-12b5-account-create-7gkdh" Oct 01 06:36:00 crc kubenswrapper[4747]: I1001 06:36:00.847163 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2c26\" (UniqueName: \"kubernetes.io/projected/e9a61e73-ac28-49eb-9d44-8a05f8147bbc-kube-api-access-z2c26\") pod \"glance-12b5-account-create-7gkdh\" (UID: \"e9a61e73-ac28-49eb-9d44-8a05f8147bbc\") " pod="glance-kuttl-tests/glance-12b5-account-create-7gkdh" Oct 01 06:36:00 crc kubenswrapper[4747]: I1001 06:36:00.919780 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-12b5-account-create-7gkdh" Oct 01 06:36:01 crc kubenswrapper[4747]: I1001 06:36:01.476193 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-12b5-account-create-7gkdh"] Oct 01 06:36:01 crc kubenswrapper[4747]: I1001 06:36:01.760626 4747 generic.go:334] "Generic (PLEG): container finished" podID="e9a61e73-ac28-49eb-9d44-8a05f8147bbc" containerID="cd6f7b81ac82353b65be132c14fff1e89895469539f1fe6d8119dbbef93f4270" exitCode=0 Oct 01 06:36:01 crc kubenswrapper[4747]: I1001 06:36:01.760699 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-12b5-account-create-7gkdh" event={"ID":"e9a61e73-ac28-49eb-9d44-8a05f8147bbc","Type":"ContainerDied","Data":"cd6f7b81ac82353b65be132c14fff1e89895469539f1fe6d8119dbbef93f4270"} Oct 01 06:36:01 crc kubenswrapper[4747]: I1001 06:36:01.760810 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-12b5-account-create-7gkdh" event={"ID":"e9a61e73-ac28-49eb-9d44-8a05f8147bbc","Type":"ContainerStarted","Data":"c8153bc1fdad7acaf29f679a78442e741db36f0a5e0a644ccc420047222c434e"} Oct 01 06:36:03 crc kubenswrapper[4747]: I1001 06:36:03.159291 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-12b5-account-create-7gkdh" Oct 01 06:36:03 crc kubenswrapper[4747]: I1001 06:36:03.248386 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2c26\" (UniqueName: \"kubernetes.io/projected/e9a61e73-ac28-49eb-9d44-8a05f8147bbc-kube-api-access-z2c26\") pod \"e9a61e73-ac28-49eb-9d44-8a05f8147bbc\" (UID: \"e9a61e73-ac28-49eb-9d44-8a05f8147bbc\") " Oct 01 06:36:03 crc kubenswrapper[4747]: I1001 06:36:03.255437 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9a61e73-ac28-49eb-9d44-8a05f8147bbc-kube-api-access-z2c26" (OuterVolumeSpecName: "kube-api-access-z2c26") pod "e9a61e73-ac28-49eb-9d44-8a05f8147bbc" (UID: "e9a61e73-ac28-49eb-9d44-8a05f8147bbc"). InnerVolumeSpecName "kube-api-access-z2c26". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:36:03 crc kubenswrapper[4747]: I1001 06:36:03.350503 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2c26\" (UniqueName: \"kubernetes.io/projected/e9a61e73-ac28-49eb-9d44-8a05f8147bbc-kube-api-access-z2c26\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:03 crc kubenswrapper[4747]: I1001 06:36:03.782139 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-12b5-account-create-7gkdh" event={"ID":"e9a61e73-ac28-49eb-9d44-8a05f8147bbc","Type":"ContainerDied","Data":"c8153bc1fdad7acaf29f679a78442e741db36f0a5e0a644ccc420047222c434e"} Oct 01 06:36:03 crc kubenswrapper[4747]: I1001 06:36:03.782220 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8153bc1fdad7acaf29f679a78442e741db36f0a5e0a644ccc420047222c434e" Oct 01 06:36:03 crc kubenswrapper[4747]: I1001 06:36:03.782249 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-12b5-account-create-7gkdh" Oct 01 06:36:05 crc kubenswrapper[4747]: I1001 06:36:05.897045 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-db-sync-9lxg7"] Oct 01 06:36:05 crc kubenswrapper[4747]: E1001 06:36:05.897876 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9a61e73-ac28-49eb-9d44-8a05f8147bbc" containerName="mariadb-account-create" Oct 01 06:36:05 crc kubenswrapper[4747]: I1001 06:36:05.897895 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9a61e73-ac28-49eb-9d44-8a05f8147bbc" containerName="mariadb-account-create" Oct 01 06:36:05 crc kubenswrapper[4747]: I1001 06:36:05.898072 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9a61e73-ac28-49eb-9d44-8a05f8147bbc" containerName="mariadb-account-create" Oct 01 06:36:05 crc kubenswrapper[4747]: I1001 06:36:05.898924 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-9lxg7" Oct 01 06:36:05 crc kubenswrapper[4747]: I1001 06:36:05.907952 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"combined-ca-bundle" Oct 01 06:36:05 crc kubenswrapper[4747]: I1001 06:36:05.908108 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-glance-dockercfg-v7b4l" Oct 01 06:36:05 crc kubenswrapper[4747]: I1001 06:36:05.907972 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-config-data" Oct 01 06:36:05 crc kubenswrapper[4747]: I1001 06:36:05.932643 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-sync-9lxg7"] Oct 01 06:36:05 crc kubenswrapper[4747]: I1001 06:36:05.990609 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-combined-ca-bundle\") pod \"glance-db-sync-9lxg7\" (UID: \"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd\") " pod="glance-kuttl-tests/glance-db-sync-9lxg7" Oct 01 06:36:05 crc kubenswrapper[4747]: I1001 06:36:05.990815 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-config-data\") pod \"glance-db-sync-9lxg7\" (UID: \"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd\") " pod="glance-kuttl-tests/glance-db-sync-9lxg7" Oct 01 06:36:05 crc kubenswrapper[4747]: I1001 06:36:05.990863 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nw77w\" (UniqueName: \"kubernetes.io/projected/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-kube-api-access-nw77w\") pod \"glance-db-sync-9lxg7\" (UID: \"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd\") " pod="glance-kuttl-tests/glance-db-sync-9lxg7" Oct 01 06:36:05 crc kubenswrapper[4747]: I1001 06:36:05.990927 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-db-sync-config-data\") pod \"glance-db-sync-9lxg7\" (UID: \"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd\") " pod="glance-kuttl-tests/glance-db-sync-9lxg7" Oct 01 06:36:06 crc kubenswrapper[4747]: I1001 06:36:06.092128 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-combined-ca-bundle\") pod \"glance-db-sync-9lxg7\" (UID: \"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd\") " pod="glance-kuttl-tests/glance-db-sync-9lxg7" Oct 01 06:36:06 crc kubenswrapper[4747]: I1001 06:36:06.092253 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-config-data\") pod \"glance-db-sync-9lxg7\" (UID: \"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd\") " pod="glance-kuttl-tests/glance-db-sync-9lxg7" Oct 01 06:36:06 crc kubenswrapper[4747]: I1001 06:36:06.092283 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nw77w\" (UniqueName: \"kubernetes.io/projected/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-kube-api-access-nw77w\") pod \"glance-db-sync-9lxg7\" (UID: \"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd\") " pod="glance-kuttl-tests/glance-db-sync-9lxg7" Oct 01 
06:36:06 crc kubenswrapper[4747]: I1001 06:36:06.092308 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-db-sync-config-data\") pod \"glance-db-sync-9lxg7\" (UID: \"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd\") " pod="glance-kuttl-tests/glance-db-sync-9lxg7" Oct 01 06:36:06 crc kubenswrapper[4747]: I1001 06:36:06.099469 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-config-data\") pod \"glance-db-sync-9lxg7\" (UID: \"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd\") " pod="glance-kuttl-tests/glance-db-sync-9lxg7" Oct 01 06:36:06 crc kubenswrapper[4747]: I1001 06:36:06.101021 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-combined-ca-bundle\") pod \"glance-db-sync-9lxg7\" (UID: \"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd\") " pod="glance-kuttl-tests/glance-db-sync-9lxg7" Oct 01 06:36:06 crc kubenswrapper[4747]: I1001 06:36:06.106563 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-db-sync-config-data\") pod \"glance-db-sync-9lxg7\" (UID: \"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd\") " pod="glance-kuttl-tests/glance-db-sync-9lxg7" Oct 01 06:36:06 crc kubenswrapper[4747]: I1001 06:36:06.126501 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nw77w\" (UniqueName: \"kubernetes.io/projected/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-kube-api-access-nw77w\") pod \"glance-db-sync-9lxg7\" (UID: \"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd\") " pod="glance-kuttl-tests/glance-db-sync-9lxg7" Oct 01 06:36:06 crc kubenswrapper[4747]: I1001 06:36:06.233866 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-9lxg7" Oct 01 06:36:06 crc kubenswrapper[4747]: I1001 06:36:06.731623 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-sync-9lxg7"] Oct 01 06:36:06 crc kubenswrapper[4747]: I1001 06:36:06.816794 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-9lxg7" event={"ID":"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd","Type":"ContainerStarted","Data":"7d8021404bcc1a2833db2b57d9632c805dbb7238d57994c07608a872d794396a"} Oct 01 06:36:07 crc kubenswrapper[4747]: I1001 06:36:07.830819 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-9lxg7" event={"ID":"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd","Type":"ContainerStarted","Data":"26f24683876d56b5f76470f21ee13444fb124e181f00ec7b01905f022e153ea4"} Oct 01 06:36:07 crc kubenswrapper[4747]: I1001 06:36:07.858856 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-db-sync-9lxg7" podStartSLOduration=2.858833128 podStartE2EDuration="2.858833128s" podCreationTimestamp="2025-10-01 06:36:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:36:07.853103817 +0000 UTC m=+1169.262760936" watchObservedRunningTime="2025-10-01 06:36:07.858833128 +0000 UTC m=+1169.268490217" Oct 01 06:36:10 crc kubenswrapper[4747]: I1001 06:36:10.877001 4747 generic.go:334] "Generic (PLEG): container finished" podID="f8ef980d-251a-4a8a-bdf7-18e7260ee8cd" containerID="26f24683876d56b5f76470f21ee13444fb124e181f00ec7b01905f022e153ea4" exitCode=0 Oct 01 06:36:10 crc kubenswrapper[4747]: I1001 06:36:10.877134 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-9lxg7" event={"ID":"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd","Type":"ContainerDied","Data":"26f24683876d56b5f76470f21ee13444fb124e181f00ec7b01905f022e153ea4"} Oct 01 06:36:12 crc kubenswrapper[4747]: I1001 06:36:12.272534 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-9lxg7" Oct 01 06:36:12 crc kubenswrapper[4747]: I1001 06:36:12.393690 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-combined-ca-bundle\") pod \"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd\" (UID: \"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd\") " Oct 01 06:36:12 crc kubenswrapper[4747]: I1001 06:36:12.393820 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-db-sync-config-data\") pod \"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd\" (UID: \"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd\") " Oct 01 06:36:12 crc kubenswrapper[4747]: I1001 06:36:12.393948 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-config-data\") pod \"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd\" (UID: \"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd\") " Oct 01 06:36:12 crc kubenswrapper[4747]: I1001 06:36:12.393990 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nw77w\" (UniqueName: \"kubernetes.io/projected/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-kube-api-access-nw77w\") pod \"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd\" (UID: \"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd\") " Oct 01 06:36:12 crc kubenswrapper[4747]: I1001 06:36:12.403362 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-kube-api-access-nw77w" (OuterVolumeSpecName: "kube-api-access-nw77w") pod "f8ef980d-251a-4a8a-bdf7-18e7260ee8cd" (UID: "f8ef980d-251a-4a8a-bdf7-18e7260ee8cd"). InnerVolumeSpecName "kube-api-access-nw77w". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:36:12 crc kubenswrapper[4747]: I1001 06:36:12.404141 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "f8ef980d-251a-4a8a-bdf7-18e7260ee8cd" (UID: "f8ef980d-251a-4a8a-bdf7-18e7260ee8cd"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:36:12 crc kubenswrapper[4747]: I1001 06:36:12.428514 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f8ef980d-251a-4a8a-bdf7-18e7260ee8cd" (UID: "f8ef980d-251a-4a8a-bdf7-18e7260ee8cd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:36:12 crc kubenswrapper[4747]: I1001 06:36:12.465559 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-config-data" (OuterVolumeSpecName: "config-data") pod "f8ef980d-251a-4a8a-bdf7-18e7260ee8cd" (UID: "f8ef980d-251a-4a8a-bdf7-18e7260ee8cd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:36:12 crc kubenswrapper[4747]: I1001 06:36:12.495298 4747 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:12 crc kubenswrapper[4747]: I1001 06:36:12.495331 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:12 crc kubenswrapper[4747]: I1001 06:36:12.495341 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nw77w\" (UniqueName: \"kubernetes.io/projected/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-kube-api-access-nw77w\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:12 crc kubenswrapper[4747]: I1001 06:36:12.495350 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:12 crc kubenswrapper[4747]: I1001 06:36:12.898053 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-9lxg7" event={"ID":"f8ef980d-251a-4a8a-bdf7-18e7260ee8cd","Type":"ContainerDied","Data":"7d8021404bcc1a2833db2b57d9632c805dbb7238d57994c07608a872d794396a"} Oct 01 06:36:12 crc kubenswrapper[4747]: I1001 06:36:12.898116 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d8021404bcc1a2833db2b57d9632c805dbb7238d57994c07608a872d794396a" Oct 01 06:36:12 crc kubenswrapper[4747]: I1001 06:36:12.898155 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-9lxg7" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.258343 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:36:13 crc kubenswrapper[4747]: E1001 06:36:13.258675 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8ef980d-251a-4a8a-bdf7-18e7260ee8cd" containerName="glance-db-sync" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.258697 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8ef980d-251a-4a8a-bdf7-18e7260ee8cd" containerName="glance-db-sync" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.258890 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8ef980d-251a-4a8a-bdf7-18e7260ee8cd" containerName="glance-db-sync" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.263714 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.267051 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"combined-ca-bundle" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.267477 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"cert-glance-default-internal-svc" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.267657 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"cert-glance-default-public-svc" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.267841 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-scripts" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.268140 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-glance-dockercfg-v7b4l" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.268439 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-default-single-config-data" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.291064 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.330033 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:36:13 crc kubenswrapper[4747]: E1001 06:36:13.330500 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config-data glance httpd-run internal-tls-certs kube-api-access-qxmvx logs public-tls-certs scripts], unattached volumes=[], failed to process volumes=[combined-ca-bundle config-data glance httpd-run internal-tls-certs kube-api-access-qxmvx logs public-tls-certs scripts]: context canceled" pod="glance-kuttl-tests/glance-default-single-0" podUID="86d4fc7b-96fa-48dc-8e76-f1a838830326" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.430851 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-combined-ca-bundle\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.430931 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxmvx\" (UniqueName: \"kubernetes.io/projected/86d4fc7b-96fa-48dc-8e76-f1a838830326-kube-api-access-qxmvx\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.431056 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.431140 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-public-tls-certs\") pod 
\"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.431174 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-scripts\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.431271 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-config-data\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.431346 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86d4fc7b-96fa-48dc-8e76-f1a838830326-logs\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.431380 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-internal-tls-certs\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.431536 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/86d4fc7b-96fa-48dc-8e76-f1a838830326-httpd-run\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.532593 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxmvx\" (UniqueName: \"kubernetes.io/projected/86d4fc7b-96fa-48dc-8e76-f1a838830326-kube-api-access-qxmvx\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.532654 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.532689 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-public-tls-certs\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.532716 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-scripts\") pod \"glance-default-single-0\" (UID: 
\"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.532779 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-config-data\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.532809 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86d4fc7b-96fa-48dc-8e76-f1a838830326-logs\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.532828 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-internal-tls-certs\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.532866 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/86d4fc7b-96fa-48dc-8e76-f1a838830326-httpd-run\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.532927 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-combined-ca-bundle\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.532997 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") device mount path \"/mnt/openstack/pv10\"" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.533415 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86d4fc7b-96fa-48dc-8e76-f1a838830326-logs\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.534035 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/86d4fc7b-96fa-48dc-8e76-f1a838830326-httpd-run\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.536146 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-scripts\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 
06:36:13.536282 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-internal-tls-certs\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.536315 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-public-tls-certs\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.537482 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-combined-ca-bundle\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.562265 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.564783 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-config-data\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.577438 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxmvx\" (UniqueName: \"kubernetes.io/projected/86d4fc7b-96fa-48dc-8e76-f1a838830326-kube-api-access-qxmvx\") pod \"glance-default-single-0\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.906351 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:13 crc kubenswrapper[4747]: I1001 06:36:13.919931 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.038581 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-scripts\") pod \"86d4fc7b-96fa-48dc-8e76-f1a838830326\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.038731 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-config-data\") pod \"86d4fc7b-96fa-48dc-8e76-f1a838830326\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.038836 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxmvx\" (UniqueName: \"kubernetes.io/projected/86d4fc7b-96fa-48dc-8e76-f1a838830326-kube-api-access-qxmvx\") pod \"86d4fc7b-96fa-48dc-8e76-f1a838830326\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.038907 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86d4fc7b-96fa-48dc-8e76-f1a838830326-logs\") pod \"86d4fc7b-96fa-48dc-8e76-f1a838830326\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.038944 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-public-tls-certs\") pod \"86d4fc7b-96fa-48dc-8e76-f1a838830326\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.039007 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/86d4fc7b-96fa-48dc-8e76-f1a838830326-httpd-run\") pod \"86d4fc7b-96fa-48dc-8e76-f1a838830326\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.039078 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-internal-tls-certs\") pod \"86d4fc7b-96fa-48dc-8e76-f1a838830326\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.039115 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"86d4fc7b-96fa-48dc-8e76-f1a838830326\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.039156 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-combined-ca-bundle\") pod \"86d4fc7b-96fa-48dc-8e76-f1a838830326\" (UID: \"86d4fc7b-96fa-48dc-8e76-f1a838830326\") " Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.042575 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-scripts" (OuterVolumeSpecName: "scripts") pod "86d4fc7b-96fa-48dc-8e76-f1a838830326" (UID: "86d4fc7b-96fa-48dc-8e76-f1a838830326"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.043289 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86d4fc7b-96fa-48dc-8e76-f1a838830326-logs" (OuterVolumeSpecName: "logs") pod "86d4fc7b-96fa-48dc-8e76-f1a838830326" (UID: "86d4fc7b-96fa-48dc-8e76-f1a838830326"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.044039 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86d4fc7b-96fa-48dc-8e76-f1a838830326-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "86d4fc7b-96fa-48dc-8e76-f1a838830326" (UID: "86d4fc7b-96fa-48dc-8e76-f1a838830326"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.044435 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "86d4fc7b-96fa-48dc-8e76-f1a838830326" (UID: "86d4fc7b-96fa-48dc-8e76-f1a838830326"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.044735 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-config-data" (OuterVolumeSpecName: "config-data") pod "86d4fc7b-96fa-48dc-8e76-f1a838830326" (UID: "86d4fc7b-96fa-48dc-8e76-f1a838830326"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.046015 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "86d4fc7b-96fa-48dc-8e76-f1a838830326" (UID: "86d4fc7b-96fa-48dc-8e76-f1a838830326"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.046554 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86d4fc7b-96fa-48dc-8e76-f1a838830326-kube-api-access-qxmvx" (OuterVolumeSpecName: "kube-api-access-qxmvx") pod "86d4fc7b-96fa-48dc-8e76-f1a838830326" (UID: "86d4fc7b-96fa-48dc-8e76-f1a838830326"). InnerVolumeSpecName "kube-api-access-qxmvx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.059272 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "86d4fc7b-96fa-48dc-8e76-f1a838830326" (UID: "86d4fc7b-96fa-48dc-8e76-f1a838830326"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.059747 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "86d4fc7b-96fa-48dc-8e76-f1a838830326" (UID: "86d4fc7b-96fa-48dc-8e76-f1a838830326"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.140630 4747 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.140673 4747 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/86d4fc7b-96fa-48dc-8e76-f1a838830326-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.140682 4747 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.140714 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.140724 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.140733 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.140743 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86d4fc7b-96fa-48dc-8e76-f1a838830326-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.140769 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxmvx\" (UniqueName: \"kubernetes.io/projected/86d4fc7b-96fa-48dc-8e76-f1a838830326-kube-api-access-qxmvx\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.140779 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/86d4fc7b-96fa-48dc-8e76-f1a838830326-logs\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.164113 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.242463 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.914545 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:14 crc kubenswrapper[4747]: I1001 06:36:14.980176 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:36:15 crc kubenswrapper[4747]: I1001 06:36:14.988117 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:36:15 crc kubenswrapper[4747]: I1001 06:36:15.290805 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86d4fc7b-96fa-48dc-8e76-f1a838830326" path="/var/lib/kubelet/pods/86d4fc7b-96fa-48dc-8e76-f1a838830326/volumes" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.036199 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.038156 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.044402 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-glance-dockercfg-v7b4l" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.044722 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"combined-ca-bundle" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.045189 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-scripts" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.045506 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"cert-glance-default-internal-svc" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.045683 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-default-single-config-data" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.046033 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"cert-glance-default-public-svc" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.074982 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.172081 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d040290-02e4-42f0-b504-2d07c7535da4-logs\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.172161 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzzdp\" (UniqueName: \"kubernetes.io/projected/2d040290-02e4-42f0-b504-2d07c7535da4-kube-api-access-bzzdp\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.172203 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.172418 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2d040290-02e4-42f0-b504-2d07c7535da4-httpd-run\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.172574 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-config-data\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.172663 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-scripts\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.172691 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-public-tls-certs\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.173883 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-combined-ca-bundle\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.173967 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-internal-tls-certs\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.275816 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-scripts\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.275903 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-public-tls-certs\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.275970 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-combined-ca-bundle\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.276042 4747 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-internal-tls-certs\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.276096 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d040290-02e4-42f0-b504-2d07c7535da4-logs\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.276139 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzzdp\" (UniqueName: \"kubernetes.io/projected/2d040290-02e4-42f0-b504-2d07c7535da4-kube-api-access-bzzdp\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.276171 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.276201 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2d040290-02e4-42f0-b504-2d07c7535da4-httpd-run\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.276239 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-config-data\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.277055 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") device mount path \"/mnt/openstack/pv10\"" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.277735 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d040290-02e4-42f0-b504-2d07c7535da4-logs\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.278117 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2d040290-02e4-42f0-b504-2d07c7535da4-httpd-run\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.284843 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-scripts\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.285312 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-internal-tls-certs\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.285458 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-config-data\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.285533 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-public-tls-certs\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.286721 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-combined-ca-bundle\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.300999 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzzdp\" (UniqueName: \"kubernetes.io/projected/2d040290-02e4-42f0-b504-2d07c7535da4-kube-api-access-bzzdp\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.305763 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-single-0\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.370359 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.858195 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:36:16 crc kubenswrapper[4747]: W1001 06:36:16.871114 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d040290_02e4_42f0_b504_2d07c7535da4.slice/crio-e340814241ed848344cdca68fbca0e3c2eb45343808690afdca1d0311d3a5af2 WatchSource:0}: Error finding container e340814241ed848344cdca68fbca0e3c2eb45343808690afdca1d0311d3a5af2: Status 404 returned error can't find the container with id e340814241ed848344cdca68fbca0e3c2eb45343808690afdca1d0311d3a5af2 Oct 01 06:36:16 crc kubenswrapper[4747]: I1001 06:36:16.934070 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"2d040290-02e4-42f0-b504-2d07c7535da4","Type":"ContainerStarted","Data":"e340814241ed848344cdca68fbca0e3c2eb45343808690afdca1d0311d3a5af2"} Oct 01 06:36:17 crc kubenswrapper[4747]: I1001 06:36:17.948128 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"2d040290-02e4-42f0-b504-2d07c7535da4","Type":"ContainerStarted","Data":"17af4380518b799c449bf0b1df9aa0a964fc461afc22f75bf0cbe9f32667dae9"} Oct 01 06:36:18 crc kubenswrapper[4747]: I1001 06:36:18.960801 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"2d040290-02e4-42f0-b504-2d07c7535da4","Type":"ContainerStarted","Data":"f4a870535f69cc90940a9ceff8bca58ca0a42c2527d6ec446f18a360e9217e9f"} Oct 01 06:36:18 crc kubenswrapper[4747]: I1001 06:36:18.989628 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-single-0" podStartSLOduration=2.989610531 podStartE2EDuration="2.989610531s" podCreationTimestamp="2025-10-01 06:36:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:36:18.98794149 +0000 UTC m=+1180.397598569" watchObservedRunningTime="2025-10-01 06:36:18.989610531 +0000 UTC m=+1180.399267580" Oct 01 06:36:26 crc kubenswrapper[4747]: I1001 06:36:26.370665 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:26 crc kubenswrapper[4747]: I1001 06:36:26.372652 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:26 crc kubenswrapper[4747]: I1001 06:36:26.412414 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:26 crc kubenswrapper[4747]: I1001 06:36:26.434961 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:27 crc kubenswrapper[4747]: I1001 06:36:27.057433 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:27 crc kubenswrapper[4747]: I1001 06:36:27.057530 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:28 crc kubenswrapper[4747]: I1001 06:36:28.992540 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:28 crc kubenswrapper[4747]: I1001 06:36:28.994972 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:30 crc kubenswrapper[4747]: I1001 06:36:30.394326 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-db-sync-9lxg7"] Oct 01 06:36:30 crc kubenswrapper[4747]: I1001 06:36:30.400333 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-db-sync-9lxg7"] Oct 01 06:36:30 crc kubenswrapper[4747]: I1001 06:36:30.429948 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance12b5-account-delete-n65t7"] Oct 01 06:36:30 crc kubenswrapper[4747]: I1001 06:36:30.430996 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance12b5-account-delete-n65t7" Oct 01 06:36:30 crc kubenswrapper[4747]: I1001 06:36:30.443219 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance12b5-account-delete-n65t7"] Oct 01 06:36:30 crc kubenswrapper[4747]: I1001 06:36:30.451763 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzml2\" (UniqueName: \"kubernetes.io/projected/743d432b-2ee4-4392-b23e-6cec5d4b9139-kube-api-access-tzml2\") pod \"glance12b5-account-delete-n65t7\" (UID: \"743d432b-2ee4-4392-b23e-6cec5d4b9139\") " pod="glance-kuttl-tests/glance12b5-account-delete-n65t7" Oct 01 06:36:30 crc kubenswrapper[4747]: I1001 06:36:30.498948 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:36:30 crc kubenswrapper[4747]: I1001 06:36:30.553111 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzml2\" (UniqueName: \"kubernetes.io/projected/743d432b-2ee4-4392-b23e-6cec5d4b9139-kube-api-access-tzml2\") pod \"glance12b5-account-delete-n65t7\" (UID: \"743d432b-2ee4-4392-b23e-6cec5d4b9139\") " pod="glance-kuttl-tests/glance12b5-account-delete-n65t7" Oct 01 06:36:30 crc kubenswrapper[4747]: I1001 06:36:30.570666 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzml2\" (UniqueName: \"kubernetes.io/projected/743d432b-2ee4-4392-b23e-6cec5d4b9139-kube-api-access-tzml2\") pod \"glance12b5-account-delete-n65t7\" (UID: \"743d432b-2ee4-4392-b23e-6cec5d4b9139\") " pod="glance-kuttl-tests/glance12b5-account-delete-n65t7" Oct 01 06:36:30 crc kubenswrapper[4747]: I1001 06:36:30.751546 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance12b5-account-delete-n65t7" Oct 01 06:36:31 crc kubenswrapper[4747]: I1001 06:36:31.019404 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance12b5-account-delete-n65t7"] Oct 01 06:36:31 crc kubenswrapper[4747]: W1001 06:36:31.026512 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod743d432b_2ee4_4392_b23e_6cec5d4b9139.slice/crio-ceb6cb5637223342a612c430087cdc325dc7a9cafba7779178e8566a7f981005 WatchSource:0}: Error finding container ceb6cb5637223342a612c430087cdc325dc7a9cafba7779178e8566a7f981005: Status 404 returned error can't find the container with id ceb6cb5637223342a612c430087cdc325dc7a9cafba7779178e8566a7f981005 Oct 01 06:36:31 crc kubenswrapper[4747]: I1001 06:36:31.100481 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-0" podUID="2d040290-02e4-42f0-b504-2d07c7535da4" containerName="glance-log" containerID="cri-o://17af4380518b799c449bf0b1df9aa0a964fc461afc22f75bf0cbe9f32667dae9" gracePeriod=30 Oct 01 06:36:31 crc kubenswrapper[4747]: I1001 06:36:31.100868 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance12b5-account-delete-n65t7" event={"ID":"743d432b-2ee4-4392-b23e-6cec5d4b9139","Type":"ContainerStarted","Data":"ceb6cb5637223342a612c430087cdc325dc7a9cafba7779178e8566a7f981005"} Oct 01 06:36:31 crc kubenswrapper[4747]: I1001 06:36:31.100993 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-single-0" podUID="2d040290-02e4-42f0-b504-2d07c7535da4" containerName="glance-httpd" containerID="cri-o://f4a870535f69cc90940a9ceff8bca58ca0a42c2527d6ec446f18a360e9217e9f" gracePeriod=30 Oct 01 06:36:31 crc kubenswrapper[4747]: I1001 06:36:31.290929 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8ef980d-251a-4a8a-bdf7-18e7260ee8cd" path="/var/lib/kubelet/pods/f8ef980d-251a-4a8a-bdf7-18e7260ee8cd/volumes" Oct 01 06:36:32 crc kubenswrapper[4747]: I1001 06:36:32.112327 4747 generic.go:334] "Generic (PLEG): container finished" podID="743d432b-2ee4-4392-b23e-6cec5d4b9139" containerID="79b3797b286723e514e5731f90237633dd8bac75e10f1253f848078bbc453b8d" exitCode=0 Oct 01 06:36:32 crc kubenswrapper[4747]: I1001 06:36:32.112385 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance12b5-account-delete-n65t7" event={"ID":"743d432b-2ee4-4392-b23e-6cec5d4b9139","Type":"ContainerDied","Data":"79b3797b286723e514e5731f90237633dd8bac75e10f1253f848078bbc453b8d"} Oct 01 06:36:32 crc kubenswrapper[4747]: I1001 06:36:32.115653 4747 generic.go:334] "Generic (PLEG): container finished" podID="2d040290-02e4-42f0-b504-2d07c7535da4" containerID="17af4380518b799c449bf0b1df9aa0a964fc461afc22f75bf0cbe9f32667dae9" exitCode=143 Oct 01 06:36:32 crc kubenswrapper[4747]: I1001 06:36:32.115863 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"2d040290-02e4-42f0-b504-2d07c7535da4","Type":"ContainerDied","Data":"17af4380518b799c449bf0b1df9aa0a964fc461afc22f75bf0cbe9f32667dae9"} Oct 01 06:36:33 crc kubenswrapper[4747]: I1001 06:36:33.541656 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance12b5-account-delete-n65t7" Oct 01 06:36:33 crc kubenswrapper[4747]: I1001 06:36:33.597415 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzml2\" (UniqueName: \"kubernetes.io/projected/743d432b-2ee4-4392-b23e-6cec5d4b9139-kube-api-access-tzml2\") pod \"743d432b-2ee4-4392-b23e-6cec5d4b9139\" (UID: \"743d432b-2ee4-4392-b23e-6cec5d4b9139\") " Oct 01 06:36:33 crc kubenswrapper[4747]: I1001 06:36:33.603733 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/743d432b-2ee4-4392-b23e-6cec5d4b9139-kube-api-access-tzml2" (OuterVolumeSpecName: "kube-api-access-tzml2") pod "743d432b-2ee4-4392-b23e-6cec5d4b9139" (UID: "743d432b-2ee4-4392-b23e-6cec5d4b9139"). InnerVolumeSpecName "kube-api-access-tzml2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:36:33 crc kubenswrapper[4747]: I1001 06:36:33.699509 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzml2\" (UniqueName: \"kubernetes.io/projected/743d432b-2ee4-4392-b23e-6cec5d4b9139-kube-api-access-tzml2\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.140278 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance12b5-account-delete-n65t7" event={"ID":"743d432b-2ee4-4392-b23e-6cec5d4b9139","Type":"ContainerDied","Data":"ceb6cb5637223342a612c430087cdc325dc7a9cafba7779178e8566a7f981005"} Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.140698 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ceb6cb5637223342a612c430087cdc325dc7a9cafba7779178e8566a7f981005" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.140389 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance12b5-account-delete-n65t7" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.647003 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.721271 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-scripts\") pod \"2d040290-02e4-42f0-b504-2d07c7535da4\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.721392 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-internal-tls-certs\") pod \"2d040290-02e4-42f0-b504-2d07c7535da4\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.721433 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-config-data\") pod \"2d040290-02e4-42f0-b504-2d07c7535da4\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.721471 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-combined-ca-bundle\") pod \"2d040290-02e4-42f0-b504-2d07c7535da4\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.721505 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bzzdp\" (UniqueName: \"kubernetes.io/projected/2d040290-02e4-42f0-b504-2d07c7535da4-kube-api-access-bzzdp\") pod \"2d040290-02e4-42f0-b504-2d07c7535da4\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.721550 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d040290-02e4-42f0-b504-2d07c7535da4-logs\") pod \"2d040290-02e4-42f0-b504-2d07c7535da4\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.721594 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"2d040290-02e4-42f0-b504-2d07c7535da4\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.721656 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2d040290-02e4-42f0-b504-2d07c7535da4-httpd-run\") pod \"2d040290-02e4-42f0-b504-2d07c7535da4\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.721709 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-public-tls-certs\") pod \"2d040290-02e4-42f0-b504-2d07c7535da4\" (UID: \"2d040290-02e4-42f0-b504-2d07c7535da4\") " Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.722181 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d040290-02e4-42f0-b504-2d07c7535da4-logs" (OuterVolumeSpecName: "logs") pod "2d040290-02e4-42f0-b504-2d07c7535da4" (UID: "2d040290-02e4-42f0-b504-2d07c7535da4"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.722831 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d040290-02e4-42f0-b504-2d07c7535da4-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "2d040290-02e4-42f0-b504-2d07c7535da4" (UID: "2d040290-02e4-42f0-b504-2d07c7535da4"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.725600 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "2d040290-02e4-42f0-b504-2d07c7535da4" (UID: "2d040290-02e4-42f0-b504-2d07c7535da4"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.725859 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d040290-02e4-42f0-b504-2d07c7535da4-kube-api-access-bzzdp" (OuterVolumeSpecName: "kube-api-access-bzzdp") pod "2d040290-02e4-42f0-b504-2d07c7535da4" (UID: "2d040290-02e4-42f0-b504-2d07c7535da4"). InnerVolumeSpecName "kube-api-access-bzzdp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.727836 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-scripts" (OuterVolumeSpecName: "scripts") pod "2d040290-02e4-42f0-b504-2d07c7535da4" (UID: "2d040290-02e4-42f0-b504-2d07c7535da4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.764266 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "2d040290-02e4-42f0-b504-2d07c7535da4" (UID: "2d040290-02e4-42f0-b504-2d07c7535da4"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.769464 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2d040290-02e4-42f0-b504-2d07c7535da4" (UID: "2d040290-02e4-42f0-b504-2d07c7535da4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.781574 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-config-data" (OuterVolumeSpecName: "config-data") pod "2d040290-02e4-42f0-b504-2d07c7535da4" (UID: "2d040290-02e4-42f0-b504-2d07c7535da4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.789113 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "2d040290-02e4-42f0-b504-2d07c7535da4" (UID: "2d040290-02e4-42f0-b504-2d07c7535da4"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.823813 4747 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.823848 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.823860 4747 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.823872 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.823887 4747 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d040290-02e4-42f0-b504-2d07c7535da4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.823899 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bzzdp\" (UniqueName: \"kubernetes.io/projected/2d040290-02e4-42f0-b504-2d07c7535da4-kube-api-access-bzzdp\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.823911 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d040290-02e4-42f0-b504-2d07c7535da4-logs\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.823941 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.823956 4747 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2d040290-02e4-42f0-b504-2d07c7535da4-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.847248 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Oct 01 06:36:34 crc kubenswrapper[4747]: I1001 06:36:34.926168 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:35 crc kubenswrapper[4747]: I1001 06:36:35.153211 4747 generic.go:334] "Generic (PLEG): container finished" podID="2d040290-02e4-42f0-b504-2d07c7535da4" containerID="f4a870535f69cc90940a9ceff8bca58ca0a42c2527d6ec446f18a360e9217e9f" exitCode=0 Oct 01 06:36:35 crc kubenswrapper[4747]: I1001 06:36:35.153274 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"2d040290-02e4-42f0-b504-2d07c7535da4","Type":"ContainerDied","Data":"f4a870535f69cc90940a9ceff8bca58ca0a42c2527d6ec446f18a360e9217e9f"} Oct 01 06:36:35 crc kubenswrapper[4747]: I1001 06:36:35.153309 4747 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-single-0" Oct 01 06:36:35 crc kubenswrapper[4747]: I1001 06:36:35.153331 4747 scope.go:117] "RemoveContainer" containerID="f4a870535f69cc90940a9ceff8bca58ca0a42c2527d6ec446f18a360e9217e9f" Oct 01 06:36:35 crc kubenswrapper[4747]: I1001 06:36:35.153314 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-single-0" event={"ID":"2d040290-02e4-42f0-b504-2d07c7535da4","Type":"ContainerDied","Data":"e340814241ed848344cdca68fbca0e3c2eb45343808690afdca1d0311d3a5af2"} Oct 01 06:36:35 crc kubenswrapper[4747]: I1001 06:36:35.184398 4747 scope.go:117] "RemoveContainer" containerID="17af4380518b799c449bf0b1df9aa0a964fc461afc22f75bf0cbe9f32667dae9" Oct 01 06:36:35 crc kubenswrapper[4747]: I1001 06:36:35.202635 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:36:35 crc kubenswrapper[4747]: I1001 06:36:35.211122 4747 scope.go:117] "RemoveContainer" containerID="f4a870535f69cc90940a9ceff8bca58ca0a42c2527d6ec446f18a360e9217e9f" Oct 01 06:36:35 crc kubenswrapper[4747]: E1001 06:36:35.211963 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4a870535f69cc90940a9ceff8bca58ca0a42c2527d6ec446f18a360e9217e9f\": container with ID starting with f4a870535f69cc90940a9ceff8bca58ca0a42c2527d6ec446f18a360e9217e9f not found: ID does not exist" containerID="f4a870535f69cc90940a9ceff8bca58ca0a42c2527d6ec446f18a360e9217e9f" Oct 01 06:36:35 crc kubenswrapper[4747]: I1001 06:36:35.212018 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4a870535f69cc90940a9ceff8bca58ca0a42c2527d6ec446f18a360e9217e9f"} err="failed to get container status \"f4a870535f69cc90940a9ceff8bca58ca0a42c2527d6ec446f18a360e9217e9f\": rpc error: code = NotFound desc = could not find container \"f4a870535f69cc90940a9ceff8bca58ca0a42c2527d6ec446f18a360e9217e9f\": container with ID starting with f4a870535f69cc90940a9ceff8bca58ca0a42c2527d6ec446f18a360e9217e9f not found: ID does not exist" Oct 01 06:36:35 crc kubenswrapper[4747]: I1001 06:36:35.212056 4747 scope.go:117] "RemoveContainer" containerID="17af4380518b799c449bf0b1df9aa0a964fc461afc22f75bf0cbe9f32667dae9" Oct 01 06:36:35 crc kubenswrapper[4747]: I1001 06:36:35.212158 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-default-single-0"] Oct 01 06:36:35 crc kubenswrapper[4747]: E1001 06:36:35.212822 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17af4380518b799c449bf0b1df9aa0a964fc461afc22f75bf0cbe9f32667dae9\": container with ID starting with 17af4380518b799c449bf0b1df9aa0a964fc461afc22f75bf0cbe9f32667dae9 not found: ID does not exist" containerID="17af4380518b799c449bf0b1df9aa0a964fc461afc22f75bf0cbe9f32667dae9" Oct 01 06:36:35 crc kubenswrapper[4747]: I1001 06:36:35.212870 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17af4380518b799c449bf0b1df9aa0a964fc461afc22f75bf0cbe9f32667dae9"} err="failed to get container status \"17af4380518b799c449bf0b1df9aa0a964fc461afc22f75bf0cbe9f32667dae9\": rpc error: code = NotFound desc = could not find container \"17af4380518b799c449bf0b1df9aa0a964fc461afc22f75bf0cbe9f32667dae9\": container with ID starting with 
17af4380518b799c449bf0b1df9aa0a964fc461afc22f75bf0cbe9f32667dae9 not found: ID does not exist" Oct 01 06:36:35 crc kubenswrapper[4747]: I1001 06:36:35.294235 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d040290-02e4-42f0-b504-2d07c7535da4" path="/var/lib/kubelet/pods/2d040290-02e4-42f0-b504-2d07c7535da4/volumes" Oct 01 06:36:35 crc kubenswrapper[4747]: I1001 06:36:35.467495 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-db-create-26qdf"] Oct 01 06:36:35 crc kubenswrapper[4747]: I1001 06:36:35.479032 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-db-create-26qdf"] Oct 01 06:36:35 crc kubenswrapper[4747]: I1001 06:36:35.487327 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-12b5-account-create-7gkdh"] Oct 01 06:36:35 crc kubenswrapper[4747]: I1001 06:36:35.494634 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance12b5-account-delete-n65t7"] Oct 01 06:36:35 crc kubenswrapper[4747]: I1001 06:36:35.501810 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-12b5-account-create-7gkdh"] Oct 01 06:36:35 crc kubenswrapper[4747]: I1001 06:36:35.508484 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance12b5-account-delete-n65t7"] Oct 01 06:36:35 crc kubenswrapper[4747]: I1001 06:36:35.761678 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:36:35 crc kubenswrapper[4747]: I1001 06:36:35.761825 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:36:36 crc kubenswrapper[4747]: I1001 06:36:36.156831 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-db-create-8gmd8"] Oct 01 06:36:36 crc kubenswrapper[4747]: E1001 06:36:36.157509 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d040290-02e4-42f0-b504-2d07c7535da4" containerName="glance-log" Oct 01 06:36:36 crc kubenswrapper[4747]: I1001 06:36:36.157524 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d040290-02e4-42f0-b504-2d07c7535da4" containerName="glance-log" Oct 01 06:36:36 crc kubenswrapper[4747]: E1001 06:36:36.157550 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d040290-02e4-42f0-b504-2d07c7535da4" containerName="glance-httpd" Oct 01 06:36:36 crc kubenswrapper[4747]: I1001 06:36:36.157560 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d040290-02e4-42f0-b504-2d07c7535da4" containerName="glance-httpd" Oct 01 06:36:36 crc kubenswrapper[4747]: E1001 06:36:36.157570 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="743d432b-2ee4-4392-b23e-6cec5d4b9139" containerName="mariadb-account-delete" Oct 01 06:36:36 crc kubenswrapper[4747]: I1001 06:36:36.157579 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="743d432b-2ee4-4392-b23e-6cec5d4b9139" containerName="mariadb-account-delete" Oct 01 06:36:36 crc kubenswrapper[4747]: I1001 06:36:36.157781 4747 
memory_manager.go:354] "RemoveStaleState removing state" podUID="2d040290-02e4-42f0-b504-2d07c7535da4" containerName="glance-log" Oct 01 06:36:36 crc kubenswrapper[4747]: I1001 06:36:36.157810 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="743d432b-2ee4-4392-b23e-6cec5d4b9139" containerName="mariadb-account-delete" Oct 01 06:36:36 crc kubenswrapper[4747]: I1001 06:36:36.157837 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d040290-02e4-42f0-b504-2d07c7535da4" containerName="glance-httpd" Oct 01 06:36:36 crc kubenswrapper[4747]: I1001 06:36:36.158357 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-create-8gmd8" Oct 01 06:36:36 crc kubenswrapper[4747]: I1001 06:36:36.175590 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-create-8gmd8"] Oct 01 06:36:36 crc kubenswrapper[4747]: I1001 06:36:36.251026 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsszh\" (UniqueName: \"kubernetes.io/projected/ae14f798-2ba0-4b69-9e18-e55a48b38b71-kube-api-access-rsszh\") pod \"glance-db-create-8gmd8\" (UID: \"ae14f798-2ba0-4b69-9e18-e55a48b38b71\") " pod="glance-kuttl-tests/glance-db-create-8gmd8" Oct 01 06:36:36 crc kubenswrapper[4747]: I1001 06:36:36.352700 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsszh\" (UniqueName: \"kubernetes.io/projected/ae14f798-2ba0-4b69-9e18-e55a48b38b71-kube-api-access-rsszh\") pod \"glance-db-create-8gmd8\" (UID: \"ae14f798-2ba0-4b69-9e18-e55a48b38b71\") " pod="glance-kuttl-tests/glance-db-create-8gmd8" Oct 01 06:36:36 crc kubenswrapper[4747]: I1001 06:36:36.378863 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rsszh\" (UniqueName: \"kubernetes.io/projected/ae14f798-2ba0-4b69-9e18-e55a48b38b71-kube-api-access-rsszh\") pod \"glance-db-create-8gmd8\" (UID: \"ae14f798-2ba0-4b69-9e18-e55a48b38b71\") " pod="glance-kuttl-tests/glance-db-create-8gmd8" Oct 01 06:36:36 crc kubenswrapper[4747]: I1001 06:36:36.477275 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-create-8gmd8" Oct 01 06:36:36 crc kubenswrapper[4747]: I1001 06:36:36.784261 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-create-8gmd8"] Oct 01 06:36:37 crc kubenswrapper[4747]: I1001 06:36:37.178982 4747 generic.go:334] "Generic (PLEG): container finished" podID="ae14f798-2ba0-4b69-9e18-e55a48b38b71" containerID="1fee38e3b98cf70adbceeec25727f3aee1bca6137f92016cb825de41e4a66c16" exitCode=0 Oct 01 06:36:37 crc kubenswrapper[4747]: I1001 06:36:37.179050 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-8gmd8" event={"ID":"ae14f798-2ba0-4b69-9e18-e55a48b38b71","Type":"ContainerDied","Data":"1fee38e3b98cf70adbceeec25727f3aee1bca6137f92016cb825de41e4a66c16"} Oct 01 06:36:37 crc kubenswrapper[4747]: I1001 06:36:37.179114 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-8gmd8" event={"ID":"ae14f798-2ba0-4b69-9e18-e55a48b38b71","Type":"ContainerStarted","Data":"95b2f05cd40b664e4bb908455921b4db04a706fac169d5b9ebba7503e9b5da6f"} Oct 01 06:36:37 crc kubenswrapper[4747]: I1001 06:36:37.302278 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="743d432b-2ee4-4392-b23e-6cec5d4b9139" path="/var/lib/kubelet/pods/743d432b-2ee4-4392-b23e-6cec5d4b9139/volumes" Oct 01 06:36:37 crc kubenswrapper[4747]: I1001 06:36:37.303102 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cae16d1f-28e0-4627-9787-f49b9cf01e89" path="/var/lib/kubelet/pods/cae16d1f-28e0-4627-9787-f49b9cf01e89/volumes" Oct 01 06:36:37 crc kubenswrapper[4747]: I1001 06:36:37.303982 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9a61e73-ac28-49eb-9d44-8a05f8147bbc" path="/var/lib/kubelet/pods/e9a61e73-ac28-49eb-9d44-8a05f8147bbc/volumes" Oct 01 06:36:38 crc kubenswrapper[4747]: I1001 06:36:38.589775 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-create-8gmd8" Oct 01 06:36:38 crc kubenswrapper[4747]: I1001 06:36:38.690396 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rsszh\" (UniqueName: \"kubernetes.io/projected/ae14f798-2ba0-4b69-9e18-e55a48b38b71-kube-api-access-rsszh\") pod \"ae14f798-2ba0-4b69-9e18-e55a48b38b71\" (UID: \"ae14f798-2ba0-4b69-9e18-e55a48b38b71\") " Oct 01 06:36:38 crc kubenswrapper[4747]: I1001 06:36:38.699117 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae14f798-2ba0-4b69-9e18-e55a48b38b71-kube-api-access-rsszh" (OuterVolumeSpecName: "kube-api-access-rsszh") pod "ae14f798-2ba0-4b69-9e18-e55a48b38b71" (UID: "ae14f798-2ba0-4b69-9e18-e55a48b38b71"). InnerVolumeSpecName "kube-api-access-rsszh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:36:38 crc kubenswrapper[4747]: I1001 06:36:38.792523 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rsszh\" (UniqueName: \"kubernetes.io/projected/ae14f798-2ba0-4b69-9e18-e55a48b38b71-kube-api-access-rsszh\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:39 crc kubenswrapper[4747]: I1001 06:36:39.202134 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-create-8gmd8" event={"ID":"ae14f798-2ba0-4b69-9e18-e55a48b38b71","Type":"ContainerDied","Data":"95b2f05cd40b664e4bb908455921b4db04a706fac169d5b9ebba7503e9b5da6f"} Oct 01 06:36:39 crc kubenswrapper[4747]: I1001 06:36:39.202190 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95b2f05cd40b664e4bb908455921b4db04a706fac169d5b9ebba7503e9b5da6f" Oct 01 06:36:39 crc kubenswrapper[4747]: I1001 06:36:39.202277 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-create-8gmd8" Oct 01 06:36:39 crc kubenswrapper[4747]: I1001 06:36:39.950396 4747 scope.go:117] "RemoveContainer" containerID="28a110bdf48346cef397f898556d8e2102e46bb35b873ebdee439d9ac850e60b" Oct 01 06:36:46 crc kubenswrapper[4747]: I1001 06:36:46.280364 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-b4f6-account-create-zkdj4"] Oct 01 06:36:46 crc kubenswrapper[4747]: E1001 06:36:46.281423 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae14f798-2ba0-4b69-9e18-e55a48b38b71" containerName="mariadb-database-create" Oct 01 06:36:46 crc kubenswrapper[4747]: I1001 06:36:46.281444 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae14f798-2ba0-4b69-9e18-e55a48b38b71" containerName="mariadb-database-create" Oct 01 06:36:46 crc kubenswrapper[4747]: I1001 06:36:46.281929 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae14f798-2ba0-4b69-9e18-e55a48b38b71" containerName="mariadb-database-create" Oct 01 06:36:46 crc kubenswrapper[4747]: I1001 06:36:46.282787 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-b4f6-account-create-zkdj4" Oct 01 06:36:46 crc kubenswrapper[4747]: I1001 06:36:46.287099 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-db-secret" Oct 01 06:36:46 crc kubenswrapper[4747]: I1001 06:36:46.292161 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-b4f6-account-create-zkdj4"] Oct 01 06:36:46 crc kubenswrapper[4747]: I1001 06:36:46.322970 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2cfp\" (UniqueName: \"kubernetes.io/projected/fcde1a2d-d437-45e9-ab7a-37ce46038e1c-kube-api-access-n2cfp\") pod \"glance-b4f6-account-create-zkdj4\" (UID: \"fcde1a2d-d437-45e9-ab7a-37ce46038e1c\") " pod="glance-kuttl-tests/glance-b4f6-account-create-zkdj4" Oct 01 06:36:46 crc kubenswrapper[4747]: I1001 06:36:46.424676 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2cfp\" (UniqueName: \"kubernetes.io/projected/fcde1a2d-d437-45e9-ab7a-37ce46038e1c-kube-api-access-n2cfp\") pod \"glance-b4f6-account-create-zkdj4\" (UID: \"fcde1a2d-d437-45e9-ab7a-37ce46038e1c\") " pod="glance-kuttl-tests/glance-b4f6-account-create-zkdj4" Oct 01 06:36:46 crc kubenswrapper[4747]: I1001 06:36:46.453207 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2cfp\" (UniqueName: \"kubernetes.io/projected/fcde1a2d-d437-45e9-ab7a-37ce46038e1c-kube-api-access-n2cfp\") pod \"glance-b4f6-account-create-zkdj4\" (UID: \"fcde1a2d-d437-45e9-ab7a-37ce46038e1c\") " pod="glance-kuttl-tests/glance-b4f6-account-create-zkdj4" Oct 01 06:36:46 crc kubenswrapper[4747]: I1001 06:36:46.633231 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-b4f6-account-create-zkdj4" Oct 01 06:36:46 crc kubenswrapper[4747]: I1001 06:36:46.961527 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-b4f6-account-create-zkdj4"] Oct 01 06:36:47 crc kubenswrapper[4747]: I1001 06:36:47.278528 4747 generic.go:334] "Generic (PLEG): container finished" podID="fcde1a2d-d437-45e9-ab7a-37ce46038e1c" containerID="749590e3d2182e0bab6ceeee832b22c2f68266702e759161b1a68972144972f4" exitCode=0 Oct 01 06:36:47 crc kubenswrapper[4747]: I1001 06:36:47.292305 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-b4f6-account-create-zkdj4" event={"ID":"fcde1a2d-d437-45e9-ab7a-37ce46038e1c","Type":"ContainerDied","Data":"749590e3d2182e0bab6ceeee832b22c2f68266702e759161b1a68972144972f4"} Oct 01 06:36:47 crc kubenswrapper[4747]: I1001 06:36:47.292374 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-b4f6-account-create-zkdj4" event={"ID":"fcde1a2d-d437-45e9-ab7a-37ce46038e1c","Type":"ContainerStarted","Data":"4ac277deb01c16bbf724ba7929bdd3f0d80963659d465ed7b2008d8d5ad22a45"} Oct 01 06:36:48 crc kubenswrapper[4747]: I1001 06:36:48.703502 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-b4f6-account-create-zkdj4" Oct 01 06:36:48 crc kubenswrapper[4747]: I1001 06:36:48.765570 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2cfp\" (UniqueName: \"kubernetes.io/projected/fcde1a2d-d437-45e9-ab7a-37ce46038e1c-kube-api-access-n2cfp\") pod \"fcde1a2d-d437-45e9-ab7a-37ce46038e1c\" (UID: \"fcde1a2d-d437-45e9-ab7a-37ce46038e1c\") " Oct 01 06:36:48 crc kubenswrapper[4747]: I1001 06:36:48.774231 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcde1a2d-d437-45e9-ab7a-37ce46038e1c-kube-api-access-n2cfp" (OuterVolumeSpecName: "kube-api-access-n2cfp") pod "fcde1a2d-d437-45e9-ab7a-37ce46038e1c" (UID: "fcde1a2d-d437-45e9-ab7a-37ce46038e1c"). InnerVolumeSpecName "kube-api-access-n2cfp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:36:48 crc kubenswrapper[4747]: I1001 06:36:48.867407 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2cfp\" (UniqueName: \"kubernetes.io/projected/fcde1a2d-d437-45e9-ab7a-37ce46038e1c-kube-api-access-n2cfp\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:49 crc kubenswrapper[4747]: I1001 06:36:49.315014 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-b4f6-account-create-zkdj4" event={"ID":"fcde1a2d-d437-45e9-ab7a-37ce46038e1c","Type":"ContainerDied","Data":"4ac277deb01c16bbf724ba7929bdd3f0d80963659d465ed7b2008d8d5ad22a45"} Oct 01 06:36:49 crc kubenswrapper[4747]: I1001 06:36:49.315386 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4ac277deb01c16bbf724ba7929bdd3f0d80963659d465ed7b2008d8d5ad22a45" Oct 01 06:36:49 crc kubenswrapper[4747]: I1001 06:36:49.315136 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-b4f6-account-create-zkdj4" Oct 01 06:36:51 crc kubenswrapper[4747]: I1001 06:36:51.337853 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-db-sync-rrxt7"] Oct 01 06:36:51 crc kubenswrapper[4747]: E1001 06:36:51.338315 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcde1a2d-d437-45e9-ab7a-37ce46038e1c" containerName="mariadb-account-create" Oct 01 06:36:51 crc kubenswrapper[4747]: I1001 06:36:51.338350 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcde1a2d-d437-45e9-ab7a-37ce46038e1c" containerName="mariadb-account-create" Oct 01 06:36:51 crc kubenswrapper[4747]: I1001 06:36:51.338565 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcde1a2d-d437-45e9-ab7a-37ce46038e1c" containerName="mariadb-account-create" Oct 01 06:36:51 crc kubenswrapper[4747]: I1001 06:36:51.339364 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-rrxt7" Oct 01 06:36:51 crc kubenswrapper[4747]: I1001 06:36:51.342344 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-glance-dockercfg-p4gnh" Oct 01 06:36:51 crc kubenswrapper[4747]: I1001 06:36:51.342675 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-config-data" Oct 01 06:36:51 crc kubenswrapper[4747]: I1001 06:36:51.353385 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-sync-rrxt7"] Oct 01 06:36:51 crc kubenswrapper[4747]: I1001 06:36:51.410005 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c39714a-ba3e-4c86-88d6-0a60bc88d227-config-data\") pod \"glance-db-sync-rrxt7\" (UID: \"2c39714a-ba3e-4c86-88d6-0a60bc88d227\") " pod="glance-kuttl-tests/glance-db-sync-rrxt7" Oct 01 06:36:51 crc kubenswrapper[4747]: I1001 06:36:51.410055 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2c39714a-ba3e-4c86-88d6-0a60bc88d227-db-sync-config-data\") pod \"glance-db-sync-rrxt7\" (UID: \"2c39714a-ba3e-4c86-88d6-0a60bc88d227\") " pod="glance-kuttl-tests/glance-db-sync-rrxt7" Oct 01 06:36:51 crc kubenswrapper[4747]: I1001 06:36:51.410087 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdcrr\" (UniqueName: \"kubernetes.io/projected/2c39714a-ba3e-4c86-88d6-0a60bc88d227-kube-api-access-zdcrr\") pod \"glance-db-sync-rrxt7\" (UID: \"2c39714a-ba3e-4c86-88d6-0a60bc88d227\") " pod="glance-kuttl-tests/glance-db-sync-rrxt7" Oct 01 06:36:51 crc kubenswrapper[4747]: I1001 06:36:51.511106 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdcrr\" (UniqueName: \"kubernetes.io/projected/2c39714a-ba3e-4c86-88d6-0a60bc88d227-kube-api-access-zdcrr\") pod \"glance-db-sync-rrxt7\" (UID: \"2c39714a-ba3e-4c86-88d6-0a60bc88d227\") " pod="glance-kuttl-tests/glance-db-sync-rrxt7" Oct 01 06:36:51 crc kubenswrapper[4747]: I1001 06:36:51.511286 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c39714a-ba3e-4c86-88d6-0a60bc88d227-config-data\") pod \"glance-db-sync-rrxt7\" (UID: \"2c39714a-ba3e-4c86-88d6-0a60bc88d227\") " pod="glance-kuttl-tests/glance-db-sync-rrxt7" Oct 01 06:36:51 crc kubenswrapper[4747]: I1001 06:36:51.511314 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2c39714a-ba3e-4c86-88d6-0a60bc88d227-db-sync-config-data\") pod \"glance-db-sync-rrxt7\" (UID: \"2c39714a-ba3e-4c86-88d6-0a60bc88d227\") " pod="glance-kuttl-tests/glance-db-sync-rrxt7" Oct 01 06:36:51 crc kubenswrapper[4747]: I1001 06:36:51.516402 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2c39714a-ba3e-4c86-88d6-0a60bc88d227-db-sync-config-data\") pod \"glance-db-sync-rrxt7\" (UID: \"2c39714a-ba3e-4c86-88d6-0a60bc88d227\") " pod="glance-kuttl-tests/glance-db-sync-rrxt7" Oct 01 06:36:51 crc kubenswrapper[4747]: I1001 06:36:51.516543 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/2c39714a-ba3e-4c86-88d6-0a60bc88d227-config-data\") pod \"glance-db-sync-rrxt7\" (UID: \"2c39714a-ba3e-4c86-88d6-0a60bc88d227\") " pod="glance-kuttl-tests/glance-db-sync-rrxt7" Oct 01 06:36:51 crc kubenswrapper[4747]: I1001 06:36:51.531870 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdcrr\" (UniqueName: \"kubernetes.io/projected/2c39714a-ba3e-4c86-88d6-0a60bc88d227-kube-api-access-zdcrr\") pod \"glance-db-sync-rrxt7\" (UID: \"2c39714a-ba3e-4c86-88d6-0a60bc88d227\") " pod="glance-kuttl-tests/glance-db-sync-rrxt7" Oct 01 06:36:51 crc kubenswrapper[4747]: I1001 06:36:51.710033 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-rrxt7" Oct 01 06:36:51 crc kubenswrapper[4747]: I1001 06:36:51.985809 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-db-sync-rrxt7"] Oct 01 06:36:51 crc kubenswrapper[4747]: W1001 06:36:51.993682 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2c39714a_ba3e_4c86_88d6_0a60bc88d227.slice/crio-5a18bcefc5ab7e5cdd0ce7e36b18669ca98d2ede7f263898d3318c3d6c28b7c7 WatchSource:0}: Error finding container 5a18bcefc5ab7e5cdd0ce7e36b18669ca98d2ede7f263898d3318c3d6c28b7c7: Status 404 returned error can't find the container with id 5a18bcefc5ab7e5cdd0ce7e36b18669ca98d2ede7f263898d3318c3d6c28b7c7 Oct 01 06:36:52 crc kubenswrapper[4747]: I1001 06:36:52.349737 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-rrxt7" event={"ID":"2c39714a-ba3e-4c86-88d6-0a60bc88d227","Type":"ContainerStarted","Data":"5a18bcefc5ab7e5cdd0ce7e36b18669ca98d2ede7f263898d3318c3d6c28b7c7"} Oct 01 06:36:53 crc kubenswrapper[4747]: I1001 06:36:53.359713 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-rrxt7" event={"ID":"2c39714a-ba3e-4c86-88d6-0a60bc88d227","Type":"ContainerStarted","Data":"bb09985c27f593139b7d41ef2d5e785a872ef540cfcd8dc56be5420857a42b2d"} Oct 01 06:36:53 crc kubenswrapper[4747]: I1001 06:36:53.380957 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-db-sync-rrxt7" podStartSLOduration=2.3809365160000002 podStartE2EDuration="2.380936516s" podCreationTimestamp="2025-10-01 06:36:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:36:53.375786689 +0000 UTC m=+1214.785443758" watchObservedRunningTime="2025-10-01 06:36:53.380936516 +0000 UTC m=+1214.790593575" Oct 01 06:36:56 crc kubenswrapper[4747]: I1001 06:36:56.392847 4747 generic.go:334] "Generic (PLEG): container finished" podID="2c39714a-ba3e-4c86-88d6-0a60bc88d227" containerID="bb09985c27f593139b7d41ef2d5e785a872ef540cfcd8dc56be5420857a42b2d" exitCode=0 Oct 01 06:36:56 crc kubenswrapper[4747]: I1001 06:36:56.392999 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-rrxt7" event={"ID":"2c39714a-ba3e-4c86-88d6-0a60bc88d227","Type":"ContainerDied","Data":"bb09985c27f593139b7d41ef2d5e785a872ef540cfcd8dc56be5420857a42b2d"} Oct 01 06:36:57 crc kubenswrapper[4747]: I1001 06:36:57.796610 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-rrxt7" Oct 01 06:36:57 crc kubenswrapper[4747]: I1001 06:36:57.912806 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c39714a-ba3e-4c86-88d6-0a60bc88d227-config-data\") pod \"2c39714a-ba3e-4c86-88d6-0a60bc88d227\" (UID: \"2c39714a-ba3e-4c86-88d6-0a60bc88d227\") " Oct 01 06:36:57 crc kubenswrapper[4747]: I1001 06:36:57.913229 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zdcrr\" (UniqueName: \"kubernetes.io/projected/2c39714a-ba3e-4c86-88d6-0a60bc88d227-kube-api-access-zdcrr\") pod \"2c39714a-ba3e-4c86-88d6-0a60bc88d227\" (UID: \"2c39714a-ba3e-4c86-88d6-0a60bc88d227\") " Oct 01 06:36:57 crc kubenswrapper[4747]: I1001 06:36:57.913606 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2c39714a-ba3e-4c86-88d6-0a60bc88d227-db-sync-config-data\") pod \"2c39714a-ba3e-4c86-88d6-0a60bc88d227\" (UID: \"2c39714a-ba3e-4c86-88d6-0a60bc88d227\") " Oct 01 06:36:57 crc kubenswrapper[4747]: I1001 06:36:57.919668 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c39714a-ba3e-4c86-88d6-0a60bc88d227-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "2c39714a-ba3e-4c86-88d6-0a60bc88d227" (UID: "2c39714a-ba3e-4c86-88d6-0a60bc88d227"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:36:57 crc kubenswrapper[4747]: I1001 06:36:57.926222 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c39714a-ba3e-4c86-88d6-0a60bc88d227-kube-api-access-zdcrr" (OuterVolumeSpecName: "kube-api-access-zdcrr") pod "2c39714a-ba3e-4c86-88d6-0a60bc88d227" (UID: "2c39714a-ba3e-4c86-88d6-0a60bc88d227"). InnerVolumeSpecName "kube-api-access-zdcrr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:36:57 crc kubenswrapper[4747]: I1001 06:36:57.944232 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c39714a-ba3e-4c86-88d6-0a60bc88d227-config-data" (OuterVolumeSpecName: "config-data") pod "2c39714a-ba3e-4c86-88d6-0a60bc88d227" (UID: "2c39714a-ba3e-4c86-88d6-0a60bc88d227"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:36:58 crc kubenswrapper[4747]: I1001 06:36:58.015768 4747 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2c39714a-ba3e-4c86-88d6-0a60bc88d227-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:58 crc kubenswrapper[4747]: I1001 06:36:58.015802 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c39714a-ba3e-4c86-88d6-0a60bc88d227-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:58 crc kubenswrapper[4747]: I1001 06:36:58.015814 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zdcrr\" (UniqueName: \"kubernetes.io/projected/2c39714a-ba3e-4c86-88d6-0a60bc88d227-kube-api-access-zdcrr\") on node \"crc\" DevicePath \"\"" Oct 01 06:36:58 crc kubenswrapper[4747]: I1001 06:36:58.417561 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-db-sync-rrxt7" event={"ID":"2c39714a-ba3e-4c86-88d6-0a60bc88d227","Type":"ContainerDied","Data":"5a18bcefc5ab7e5cdd0ce7e36b18669ca98d2ede7f263898d3318c3d6c28b7c7"} Oct 01 06:36:58 crc kubenswrapper[4747]: I1001 06:36:58.417618 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-db-sync-rrxt7" Oct 01 06:36:58 crc kubenswrapper[4747]: I1001 06:36:58.417625 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a18bcefc5ab7e5cdd0ce7e36b18669ca98d2ede7f263898d3318c3d6c28b7c7" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.253154 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-external-api-0"] Oct 01 06:37:00 crc kubenswrapper[4747]: E1001 06:37:00.253829 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c39714a-ba3e-4c86-88d6-0a60bc88d227" containerName="glance-db-sync" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.253851 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c39714a-ba3e-4c86-88d6-0a60bc88d227" containerName="glance-db-sync" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.254095 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c39714a-ba3e-4c86-88d6-0a60bc88d227" containerName="glance-db-sync" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.255710 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.258337 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-scripts" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.259096 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-glance-dockercfg-p4gnh" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.266195 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-default-external-config-data" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.293145 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-external-api-0"] Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.334355 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-external-api-0"] Oct 01 06:37:00 crc kubenswrapper[4747]: E1001 06:37:00.335043 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config-data dev etc-iscsi etc-nvme glance glance-cache httpd-run kube-api-access-skgmf lib-modules logs run scripts sys var-locks-brick], unattached volumes=[], failed to process volumes=[]: context canceled" pod="glance-kuttl-tests/glance-default-external-api-0" podUID="a8bed837-a357-418f-a170-fa88244b35e7" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.357787 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8bed837-a357-418f-a170-fa88244b35e7-config-data\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.357883 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-lib-modules\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.357995 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8bed837-a357-418f-a170-fa88244b35e7-scripts\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.358038 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-etc-iscsi\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.358082 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-etc-nvme\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.358172 4747 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-var-locks-brick\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.358223 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a8bed837-a357-418f-a170-fa88244b35e7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.358252 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.358310 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-dev\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.358355 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-run\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.358391 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8bed837-a357-418f-a170-fa88244b35e7-logs\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.358485 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-sys\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.358551 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.358600 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skgmf\" (UniqueName: \"kubernetes.io/projected/a8bed837-a357-418f-a170-fa88244b35e7-kube-api-access-skgmf\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " 
pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.394693 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.397585 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.399630 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-default-internal-config-data" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.417969 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.430666 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.447933 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.461696 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-run\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.461763 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8bed837-a357-418f-a170-fa88244b35e7-logs\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.461799 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-sys\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.461843 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40c41037-ede7-4391-88d5-21c6724de957-config-data\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.461874 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-sys\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.461925 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-sys\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.461968 4747 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/40c41037-ede7-4391-88d5-21c6724de957-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.462120 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-etc-iscsi\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.462180 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40c41037-ede7-4391-88d5-21c6724de957-logs\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.462282 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-dev\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.462315 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8bed837-a357-418f-a170-fa88244b35e7-logs\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.462338 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-lib-modules\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.462407 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.462486 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40c41037-ede7-4391-88d5-21c6724de957-scripts\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.462522 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skgmf\" (UniqueName: \"kubernetes.io/projected/a8bed837-a357-418f-a170-fa88244b35e7-kube-api-access-skgmf\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc 
kubenswrapper[4747]: I1001 06:37:00.462490 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-run\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.462562 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-var-locks-brick\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.462610 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8bed837-a357-418f-a170-fa88244b35e7-config-data\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.462636 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-run\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.462691 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-lib-modules\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.462730 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.462778 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.462822 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8bed837-a357-418f-a170-fa88244b35e7-scripts\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.462844 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") device mount path \"/mnt/openstack/pv03\"" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 
06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.462855 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-etc-iscsi\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.462889 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-etc-iscsi\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.462921 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-lib-modules\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.463167 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4p2h\" (UniqueName: \"kubernetes.io/projected/40c41037-ede7-4391-88d5-21c6724de957-kube-api-access-v4p2h\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.463278 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-etc-nvme\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.463454 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-etc-nvme\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.463464 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-var-locks-brick\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.463645 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a8bed837-a357-418f-a170-fa88244b35e7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.463783 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc 
kubenswrapper[4747]: I1001 06:37:00.463876 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-dev\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.463960 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-etc-nvme\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.464109 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") device mount path \"/mnt/openstack/pv04\"" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.464226 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-dev\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.463511 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-var-locks-brick\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.464000 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a8bed837-a357-418f-a170-fa88244b35e7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.468431 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8bed837-a357-418f-a170-fa88244b35e7-scripts\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.470939 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8bed837-a357-418f-a170-fa88244b35e7-config-data\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.487324 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skgmf\" (UniqueName: \"kubernetes.io/projected/a8bed837-a357-418f-a170-fa88244b35e7-kube-api-access-skgmf\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.496359 
4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.507022 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.565277 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-etc-nvme\") pod \"a8bed837-a357-418f-a170-fa88244b35e7\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.565339 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-run\") pod \"a8bed837-a357-418f-a170-fa88244b35e7\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.565396 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a8bed837-a357-418f-a170-fa88244b35e7-httpd-run\") pod \"a8bed837-a357-418f-a170-fa88244b35e7\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.565395 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "a8bed837-a357-418f-a170-fa88244b35e7" (UID: "a8bed837-a357-418f-a170-fa88244b35e7"). InnerVolumeSpecName "etc-nvme". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.565429 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-var-locks-brick\") pod \"a8bed837-a357-418f-a170-fa88244b35e7\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.565473 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "a8bed837-a357-418f-a170-fa88244b35e7" (UID: "a8bed837-a357-418f-a170-fa88244b35e7"). InnerVolumeSpecName "var-locks-brick". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.565494 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance-cache\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"a8bed837-a357-418f-a170-fa88244b35e7\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.565509 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-run" (OuterVolumeSpecName: "run") pod "a8bed837-a357-418f-a170-fa88244b35e7" (UID: "a8bed837-a357-418f-a170-fa88244b35e7"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.565543 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skgmf\" (UniqueName: \"kubernetes.io/projected/a8bed837-a357-418f-a170-fa88244b35e7-kube-api-access-skgmf\") pod \"a8bed837-a357-418f-a170-fa88244b35e7\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.565574 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-etc-iscsi\") pod \"a8bed837-a357-418f-a170-fa88244b35e7\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.565663 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"a8bed837-a357-418f-a170-fa88244b35e7\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.565709 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-sys\") pod \"a8bed837-a357-418f-a170-fa88244b35e7\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.565741 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-lib-modules\") pod \"a8bed837-a357-418f-a170-fa88244b35e7\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.565799 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-dev\") pod \"a8bed837-a357-418f-a170-fa88244b35e7\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.565806 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8bed837-a357-418f-a170-fa88244b35e7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a8bed837-a357-418f-a170-fa88244b35e7" (UID: "a8bed837-a357-418f-a170-fa88244b35e7"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.565836 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "a8bed837-a357-418f-a170-fa88244b35e7" (UID: "a8bed837-a357-418f-a170-fa88244b35e7"). InnerVolumeSpecName "etc-iscsi". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.565844 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8bed837-a357-418f-a170-fa88244b35e7-config-data\") pod \"a8bed837-a357-418f-a170-fa88244b35e7\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.565872 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8bed837-a357-418f-a170-fa88244b35e7-scripts\") pod \"a8bed837-a357-418f-a170-fa88244b35e7\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.565893 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8bed837-a357-418f-a170-fa88244b35e7-logs\") pod \"a8bed837-a357-418f-a170-fa88244b35e7\" (UID: \"a8bed837-a357-418f-a170-fa88244b35e7\") " Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.566130 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.566162 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.566230 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4p2h\" (UniqueName: \"kubernetes.io/projected/40c41037-ede7-4391-88d5-21c6724de957-kube-api-access-v4p2h\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.566279 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-etc-nvme\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.566330 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-sys\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.566371 4747 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40c41037-ede7-4391-88d5-21c6724de957-config-data\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.566413 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/40c41037-ede7-4391-88d5-21c6724de957-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.566436 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-etc-iscsi\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.566454 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40c41037-ede7-4391-88d5-21c6724de957-logs\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.566492 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-dev\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.566520 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-lib-modules\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.566565 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40c41037-ede7-4391-88d5-21c6724de957-scripts\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.566593 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-var-locks-brick\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.566632 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-run\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.566701 4747 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: 
\"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-etc-nvme\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.566717 4747 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-run\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.566729 4747 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a8bed837-a357-418f-a170-fa88244b35e7-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.566741 4747 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-var-locks-brick\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.566771 4747 reconciler_common.go:293] "Volume detached for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-etc-iscsi\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.566821 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-run\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.567209 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-lib-modules\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.567257 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-dev\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.567844 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40c41037-ede7-4391-88d5-21c6724de957-logs\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.568009 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-sys\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.568482 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-etc-nvme\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.569497 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/a8bed837-a357-418f-a170-fa88244b35e7-logs" (OuterVolumeSpecName: "logs") pod "a8bed837-a357-418f-a170-fa88244b35e7" (UID: "a8bed837-a357-418f-a170-fa88244b35e7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.569598 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8bed837-a357-418f-a170-fa88244b35e7-kube-api-access-skgmf" (OuterVolumeSpecName: "kube-api-access-skgmf") pod "a8bed837-a357-418f-a170-fa88244b35e7" (UID: "a8bed837-a357-418f-a170-fa88244b35e7"). InnerVolumeSpecName "kube-api-access-skgmf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.569675 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-var-locks-brick\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.569795 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") device mount path \"/mnt/openstack/pv05\"" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.570201 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") device mount path \"/mnt/openstack/pv09\"" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.570574 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/40c41037-ede7-4391-88d5-21c6724de957-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.571039 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "a8bed837-a357-418f-a170-fa88244b35e7" (UID: "a8bed837-a357-418f-a170-fa88244b35e7"). InnerVolumeSpecName "lib-modules". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.571102 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-sys" (OuterVolumeSpecName: "sys") pod "a8bed837-a357-418f-a170-fa88244b35e7" (UID: "a8bed837-a357-418f-a170-fa88244b35e7"). InnerVolumeSpecName "sys". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.571116 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-dev" (OuterVolumeSpecName: "dev") pod "a8bed837-a357-418f-a170-fa88244b35e7" (UID: "a8bed837-a357-418f-a170-fa88244b35e7"). InnerVolumeSpecName "dev". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.571543 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance-cache") pod "a8bed837-a357-418f-a170-fa88244b35e7" (UID: "a8bed837-a357-418f-a170-fa88244b35e7"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.572840 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40c41037-ede7-4391-88d5-21c6724de957-scripts\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.571124 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-etc-iscsi\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.574513 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40c41037-ede7-4391-88d5-21c6724de957-config-data\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.587779 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8bed837-a357-418f-a170-fa88244b35e7-config-data" (OuterVolumeSpecName: "config-data") pod "a8bed837-a357-418f-a170-fa88244b35e7" (UID: "a8bed837-a357-418f-a170-fa88244b35e7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.593420 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "a8bed837-a357-418f-a170-fa88244b35e7" (UID: "a8bed837-a357-418f-a170-fa88244b35e7"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.594405 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4p2h\" (UniqueName: \"kubernetes.io/projected/40c41037-ede7-4391-88d5-21c6724de957-kube-api-access-v4p2h\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.597107 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8bed837-a357-418f-a170-fa88244b35e7-scripts" (OuterVolumeSpecName: "scripts") pod "a8bed837-a357-418f-a170-fa88244b35e7" (UID: "a8bed837-a357-418f-a170-fa88244b35e7"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.607049 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.629184 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.668092 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.668610 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skgmf\" (UniqueName: \"kubernetes.io/projected/a8bed837-a357-418f-a170-fa88244b35e7-kube-api-access-skgmf\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.668713 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.668809 4747 reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-sys\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.668883 4747 reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-lib-modules\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.668953 4747 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/a8bed837-a357-418f-a170-fa88244b35e7-dev\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.669021 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8bed837-a357-418f-a170-fa88244b35e7-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.669101 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8bed837-a357-418f-a170-fa88244b35e7-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.669172 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8bed837-a357-418f-a170-fa88244b35e7-logs\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.680387 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.681645 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Oct 
01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.713551 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.770735 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:00 crc kubenswrapper[4747]: I1001 06:37:00.770815 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.197943 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.444294 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.445446 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"40c41037-ede7-4391-88d5-21c6724de957","Type":"ContainerStarted","Data":"b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d"} Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.447001 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"40c41037-ede7-4391-88d5-21c6724de957","Type":"ContainerStarted","Data":"2c312e93d791a6b2e94ced3e34b9f92b90af56db84bd243b24e826eb4b592da2"} Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.511290 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-external-api-0"] Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.517008 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-default-external-api-0"] Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.540200 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-external-api-0"] Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.541484 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.544021 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-default-external-config-data" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.563276 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-external-api-0"] Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.587515 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/5580ae54-87c6-4762-b717-dacdd7195382-lib-modules\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.587553 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/5580ae54-87c6-4762-b717-dacdd7195382-etc-nvme\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.587580 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xlx54\" (UniqueName: \"kubernetes.io/projected/5580ae54-87c6-4762-b717-dacdd7195382-kube-api-access-xlx54\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.587635 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/5580ae54-87c6-4762-b717-dacdd7195382-etc-iscsi\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.587683 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5580ae54-87c6-4762-b717-dacdd7195382-logs\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.587701 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5580ae54-87c6-4762-b717-dacdd7195382-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.587721 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/5580ae54-87c6-4762-b717-dacdd7195382-run\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.587737 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: 
\"kubernetes.io/host-path/5580ae54-87c6-4762-b717-dacdd7195382-var-locks-brick\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.587780 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.587805 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5580ae54-87c6-4762-b717-dacdd7195382-config-data\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.588003 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/5580ae54-87c6-4762-b717-dacdd7195382-dev\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.588137 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5580ae54-87c6-4762-b717-dacdd7195382-scripts\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.588216 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.588303 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/5580ae54-87c6-4762-b717-dacdd7195382-sys\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.689803 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.689859 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/5580ae54-87c6-4762-b717-dacdd7195382-sys\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.689894 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: 
\"kubernetes.io/host-path/5580ae54-87c6-4762-b717-dacdd7195382-lib-modules\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.689920 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/5580ae54-87c6-4762-b717-dacdd7195382-etc-nvme\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.689948 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xlx54\" (UniqueName: \"kubernetes.io/projected/5580ae54-87c6-4762-b717-dacdd7195382-kube-api-access-xlx54\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.689977 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/5580ae54-87c6-4762-b717-dacdd7195382-etc-iscsi\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.690026 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5580ae54-87c6-4762-b717-dacdd7195382-logs\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.690047 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5580ae54-87c6-4762-b717-dacdd7195382-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.690075 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/5580ae54-87c6-4762-b717-dacdd7195382-run\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.690113 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/5580ae54-87c6-4762-b717-dacdd7195382-var-locks-brick\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.690138 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") device mount path \"/mnt/openstack/pv03\"" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.690157 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.690190 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5580ae54-87c6-4762-b717-dacdd7195382-config-data\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.690225 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/5580ae54-87c6-4762-b717-dacdd7195382-dev\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.690261 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5580ae54-87c6-4762-b717-dacdd7195382-scripts\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.691025 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5580ae54-87c6-4762-b717-dacdd7195382-logs\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.691110 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/5580ae54-87c6-4762-b717-dacdd7195382-sys\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.691162 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/5580ae54-87c6-4762-b717-dacdd7195382-lib-modules\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.691228 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/5580ae54-87c6-4762-b717-dacdd7195382-etc-nvme\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.691965 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/5580ae54-87c6-4762-b717-dacdd7195382-etc-iscsi\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.692053 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/5580ae54-87c6-4762-b717-dacdd7195382-var-locks-brick\") pod \"glance-default-external-api-0\" (UID: 
\"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.692434 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5580ae54-87c6-4762-b717-dacdd7195382-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.692499 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/5580ae54-87c6-4762-b717-dacdd7195382-run\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.693544 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") device mount path \"/mnt/openstack/pv04\"" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.694351 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/5580ae54-87c6-4762-b717-dacdd7195382-dev\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.695080 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5580ae54-87c6-4762-b717-dacdd7195382-scripts\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.698164 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5580ae54-87c6-4762-b717-dacdd7195382-config-data\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.717740 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.718008 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xlx54\" (UniqueName: \"kubernetes.io/projected/5580ae54-87c6-4762-b717-dacdd7195382-kube-api-access-xlx54\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.721882 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"5580ae54-87c6-4762-b717-dacdd7195382\") " pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:01 
crc kubenswrapper[4747]: I1001 06:37:01.779585 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Oct 01 06:37:01 crc kubenswrapper[4747]: I1001 06:37:01.857969 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.125400 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-external-api-0"] Oct 01 06:37:02 crc kubenswrapper[4747]: W1001 06:37:02.128395 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5580ae54_87c6_4762_b717_dacdd7195382.slice/crio-9944eb15ccd4b3c9a39d7ccb6c08a1bc63b5a3003a44fbf48075e8404413a3ef WatchSource:0}: Error finding container 9944eb15ccd4b3c9a39d7ccb6c08a1bc63b5a3003a44fbf48075e8404413a3ef: Status 404 returned error can't find the container with id 9944eb15ccd4b3c9a39d7ccb6c08a1bc63b5a3003a44fbf48075e8404413a3ef Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.457366 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-external-api-0" event={"ID":"5580ae54-87c6-4762-b717-dacdd7195382","Type":"ContainerStarted","Data":"bf6cf34b07a5ffe2e70606041cd65e9a0ff01529af5217ee9d0cc98325ee55e1"} Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.457627 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-external-api-0" event={"ID":"5580ae54-87c6-4762-b717-dacdd7195382","Type":"ContainerStarted","Data":"9944eb15ccd4b3c9a39d7ccb6c08a1bc63b5a3003a44fbf48075e8404413a3ef"} Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.459958 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"40c41037-ede7-4391-88d5-21c6724de957","Type":"ContainerStarted","Data":"dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86"} Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.459995 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"40c41037-ede7-4391-88d5-21c6724de957","Type":"ContainerStarted","Data":"cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee"} Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.460104 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-internal-api-0" podUID="40c41037-ede7-4391-88d5-21c6724de957" containerName="glance-log" containerID="cri-o://b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d" gracePeriod=30 Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.460388 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-internal-api-0" podUID="40c41037-ede7-4391-88d5-21c6724de957" containerName="glance-api" containerID="cri-o://dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86" gracePeriod=30 Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.460448 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="glance-kuttl-tests/glance-default-internal-api-0" podUID="40c41037-ede7-4391-88d5-21c6724de957" containerName="glance-httpd" containerID="cri-o://cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee" gracePeriod=30 Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.492126 4747 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=3.492104303 podStartE2EDuration="3.492104303s" podCreationTimestamp="2025-10-01 06:36:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:37:02.490255188 +0000 UTC m=+1223.899912277" watchObservedRunningTime="2025-10-01 06:37:02.492104303 +0000 UTC m=+1223.901761392" Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.883483 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.906055 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40c41037-ede7-4391-88d5-21c6724de957-scripts\") pod \"40c41037-ede7-4391-88d5-21c6724de957\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.906112 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-etc-iscsi\") pod \"40c41037-ede7-4391-88d5-21c6724de957\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.906155 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-dev\") pod \"40c41037-ede7-4391-88d5-21c6724de957\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.906176 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4p2h\" (UniqueName: \"kubernetes.io/projected/40c41037-ede7-4391-88d5-21c6724de957-kube-api-access-v4p2h\") pod \"40c41037-ede7-4391-88d5-21c6724de957\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.906204 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40c41037-ede7-4391-88d5-21c6724de957-logs\") pod \"40c41037-ede7-4391-88d5-21c6724de957\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.906222 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"40c41037-ede7-4391-88d5-21c6724de957\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.906234 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance-cache\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"40c41037-ede7-4391-88d5-21c6724de957\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.906263 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-etc-nvme\") pod \"40c41037-ede7-4391-88d5-21c6724de957\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.906279 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-var-locks-brick\") pod \"40c41037-ede7-4391-88d5-21c6724de957\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.906301 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-sys\") pod \"40c41037-ede7-4391-88d5-21c6724de957\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.906318 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/40c41037-ede7-4391-88d5-21c6724de957-httpd-run\") pod \"40c41037-ede7-4391-88d5-21c6724de957\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.906364 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-lib-modules\") pod \"40c41037-ede7-4391-88d5-21c6724de957\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.906386 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-run\") pod \"40c41037-ede7-4391-88d5-21c6724de957\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.906425 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40c41037-ede7-4391-88d5-21c6724de957-config-data\") pod \"40c41037-ede7-4391-88d5-21c6724de957\" (UID: \"40c41037-ede7-4391-88d5-21c6724de957\") " Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.907554 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-etc-nvme" (OuterVolumeSpecName: "etc-nvme") pod "40c41037-ede7-4391-88d5-21c6724de957" (UID: "40c41037-ede7-4391-88d5-21c6724de957"). InnerVolumeSpecName "etc-nvme". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.907565 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-sys" (OuterVolumeSpecName: "sys") pod "40c41037-ede7-4391-88d5-21c6724de957" (UID: "40c41037-ede7-4391-88d5-21c6724de957"). InnerVolumeSpecName "sys". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.907636 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-lib-modules" (OuterVolumeSpecName: "lib-modules") pod "40c41037-ede7-4391-88d5-21c6724de957" (UID: "40c41037-ede7-4391-88d5-21c6724de957"). InnerVolumeSpecName "lib-modules". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.907636 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-var-locks-brick" (OuterVolumeSpecName: "var-locks-brick") pod "40c41037-ede7-4391-88d5-21c6724de957" (UID: "40c41037-ede7-4391-88d5-21c6724de957"). 
InnerVolumeSpecName "var-locks-brick". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.907687 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-run" (OuterVolumeSpecName: "run") pod "40c41037-ede7-4391-88d5-21c6724de957" (UID: "40c41037-ede7-4391-88d5-21c6724de957"). InnerVolumeSpecName "run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.907717 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-etc-iscsi" (OuterVolumeSpecName: "etc-iscsi") pod "40c41037-ede7-4391-88d5-21c6724de957" (UID: "40c41037-ede7-4391-88d5-21c6724de957"). InnerVolumeSpecName "etc-iscsi". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.907742 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-dev" (OuterVolumeSpecName: "dev") pod "40c41037-ede7-4391-88d5-21c6724de957" (UID: "40c41037-ede7-4391-88d5-21c6724de957"). InnerVolumeSpecName "dev". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.907850 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40c41037-ede7-4391-88d5-21c6724de957-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "40c41037-ede7-4391-88d5-21c6724de957" (UID: "40c41037-ede7-4391-88d5-21c6724de957"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.907840 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40c41037-ede7-4391-88d5-21c6724de957-logs" (OuterVolumeSpecName: "logs") pod "40c41037-ede7-4391-88d5-21c6724de957" (UID: "40c41037-ede7-4391-88d5-21c6724de957"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.911938 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance-cache") pod "40c41037-ede7-4391-88d5-21c6724de957" (UID: "40c41037-ede7-4391-88d5-21c6724de957"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.912204 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "40c41037-ede7-4391-88d5-21c6724de957" (UID: "40c41037-ede7-4391-88d5-21c6724de957"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.918672 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40c41037-ede7-4391-88d5-21c6724de957-kube-api-access-v4p2h" (OuterVolumeSpecName: "kube-api-access-v4p2h") pod "40c41037-ede7-4391-88d5-21c6724de957" (UID: "40c41037-ede7-4391-88d5-21c6724de957"). InnerVolumeSpecName "kube-api-access-v4p2h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.921913 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40c41037-ede7-4391-88d5-21c6724de957-scripts" (OuterVolumeSpecName: "scripts") pod "40c41037-ede7-4391-88d5-21c6724de957" (UID: "40c41037-ede7-4391-88d5-21c6724de957"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:37:02 crc kubenswrapper[4747]: I1001 06:37:02.979141 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40c41037-ede7-4391-88d5-21c6724de957-config-data" (OuterVolumeSpecName: "config-data") pod "40c41037-ede7-4391-88d5-21c6724de957" (UID: "40c41037-ede7-4391-88d5-21c6724de957"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.008046 4747 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40c41037-ede7-4391-88d5-21c6724de957-config-data\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.008078 4747 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40c41037-ede7-4391-88d5-21c6724de957-scripts\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.008087 4747 reconciler_common.go:293] "Volume detached for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-etc-iscsi\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.008097 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4p2h\" (UniqueName: \"kubernetes.io/projected/40c41037-ede7-4391-88d5-21c6724de957-kube-api-access-v4p2h\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.008106 4747 reconciler_common.go:293] "Volume detached for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-dev\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.008114 4747 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40c41037-ede7-4391-88d5-21c6724de957-logs\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.008143 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.008155 4747 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.008164 4747 reconciler_common.go:293] "Volume detached for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-etc-nvme\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.008172 4747 reconciler_common.go:293] "Volume detached for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-var-locks-brick\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.008180 4747 
reconciler_common.go:293] "Volume detached for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-sys\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.008189 4747 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/40c41037-ede7-4391-88d5-21c6724de957-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.008198 4747 reconciler_common.go:293] "Volume detached for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-lib-modules\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.008206 4747 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/host-path/40c41037-ede7-4391-88d5-21c6724de957-run\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.020361 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.021654 4747 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.111295 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.111811 4747 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.289865 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8bed837-a357-418f-a170-fa88244b35e7" path="/var/lib/kubelet/pods/a8bed837-a357-418f-a170-fa88244b35e7/volumes" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.482285 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-external-api-0" event={"ID":"5580ae54-87c6-4762-b717-dacdd7195382","Type":"ContainerStarted","Data":"e8c4e2a03f86cae91e4774dec3477a1f0a25c9c06b566b9cac245916cf0c3fcf"} Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.482363 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-external-api-0" event={"ID":"5580ae54-87c6-4762-b717-dacdd7195382","Type":"ContainerStarted","Data":"218c2e540d7fd8f6a1ae69bc68c76f5c2f91b1fea19cba00231c384dde24d113"} Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.493185 4747 generic.go:334] "Generic (PLEG): container finished" podID="40c41037-ede7-4391-88d5-21c6724de957" containerID="dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86" exitCode=143 Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.493227 4747 generic.go:334] "Generic (PLEG): container finished" podID="40c41037-ede7-4391-88d5-21c6724de957" containerID="cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee" exitCode=143 Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.493239 4747 generic.go:334] "Generic (PLEG): container finished" podID="40c41037-ede7-4391-88d5-21c6724de957" containerID="b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d" 
exitCode=143 Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.493267 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"40c41037-ede7-4391-88d5-21c6724de957","Type":"ContainerDied","Data":"dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86"} Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.493301 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"40c41037-ede7-4391-88d5-21c6724de957","Type":"ContainerDied","Data":"cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee"} Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.493314 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"40c41037-ede7-4391-88d5-21c6724de957","Type":"ContainerDied","Data":"b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d"} Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.493310 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.493340 4747 scope.go:117] "RemoveContainer" containerID="dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.493327 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"40c41037-ede7-4391-88d5-21c6724de957","Type":"ContainerDied","Data":"2c312e93d791a6b2e94ced3e34b9f92b90af56db84bd243b24e826eb4b592da2"} Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.537108 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-external-api-0" podStartSLOduration=2.537072131 podStartE2EDuration="2.537072131s" podCreationTimestamp="2025-10-01 06:37:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:37:03.516096805 +0000 UTC m=+1224.925753924" watchObservedRunningTime="2025-10-01 06:37:03.537072131 +0000 UTC m=+1224.946729210" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.545377 4747 scope.go:117] "RemoveContainer" containerID="cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.558097 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.569941 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.578272 4747 scope.go:117] "RemoveContainer" containerID="b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.594393 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Oct 01 06:37:03 crc kubenswrapper[4747]: E1001 06:37:03.595543 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40c41037-ede7-4391-88d5-21c6724de957" containerName="glance-httpd" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.595570 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="40c41037-ede7-4391-88d5-21c6724de957" containerName="glance-httpd" Oct 01 06:37:03 crc 
kubenswrapper[4747]: E1001 06:37:03.595593 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40c41037-ede7-4391-88d5-21c6724de957" containerName="glance-log" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.595603 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="40c41037-ede7-4391-88d5-21c6724de957" containerName="glance-log" Oct 01 06:37:03 crc kubenswrapper[4747]: E1001 06:37:03.595623 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40c41037-ede7-4391-88d5-21c6724de957" containerName="glance-api" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.595633 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="40c41037-ede7-4391-88d5-21c6724de957" containerName="glance-api" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.595824 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="40c41037-ede7-4391-88d5-21c6724de957" containerName="glance-api" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.595847 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="40c41037-ede7-4391-88d5-21c6724de957" containerName="glance-httpd" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.595860 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="40c41037-ede7-4391-88d5-21c6724de957" containerName="glance-log" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.598440 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.601016 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"glance-default-internal-config-data" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.605072 4747 scope.go:117] "RemoveContainer" containerID="dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86" Oct 01 06:37:03 crc kubenswrapper[4747]: E1001 06:37:03.605941 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86\": container with ID starting with dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86 not found: ID does not exist" containerID="dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.605990 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86"} err="failed to get container status \"dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86\": rpc error: code = NotFound desc = could not find container \"dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86\": container with ID starting with dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86 not found: ID does not exist" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.606025 4747 scope.go:117] "RemoveContainer" containerID="cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee" Oct 01 06:37:03 crc kubenswrapper[4747]: E1001 06:37:03.606409 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee\": container with ID starting with cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee not found: ID does not exist" 
containerID="cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.606453 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee"} err="failed to get container status \"cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee\": rpc error: code = NotFound desc = could not find container \"cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee\": container with ID starting with cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee not found: ID does not exist" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.606486 4747 scope.go:117] "RemoveContainer" containerID="b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d" Oct 01 06:37:03 crc kubenswrapper[4747]: E1001 06:37:03.606784 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d\": container with ID starting with b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d not found: ID does not exist" containerID="b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.606821 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d"} err="failed to get container status \"b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d\": rpc error: code = NotFound desc = could not find container \"b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d\": container with ID starting with b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d not found: ID does not exist" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.606844 4747 scope.go:117] "RemoveContainer" containerID="dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.607095 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86"} err="failed to get container status \"dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86\": rpc error: code = NotFound desc = could not find container \"dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86\": container with ID starting with dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86 not found: ID does not exist" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.607125 4747 scope.go:117] "RemoveContainer" containerID="cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.607338 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee"} err="failed to get container status \"cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee\": rpc error: code = NotFound desc = could not find container \"cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee\": container with ID starting with cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee not found: ID does not exist" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.607354 4747 scope.go:117] "RemoveContainer" 
containerID="b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.607620 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d"} err="failed to get container status \"b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d\": rpc error: code = NotFound desc = could not find container \"b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d\": container with ID starting with b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d not found: ID does not exist" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.607647 4747 scope.go:117] "RemoveContainer" containerID="dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.608162 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86"} err="failed to get container status \"dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86\": rpc error: code = NotFound desc = could not find container \"dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86\": container with ID starting with dd807c9c7930e866e52b976b6102fc7c34181123b77a67f56f94e0436260fc86 not found: ID does not exist" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.608190 4747 scope.go:117] "RemoveContainer" containerID="cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.608428 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee"} err="failed to get container status \"cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee\": rpc error: code = NotFound desc = could not find container \"cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee\": container with ID starting with cda547a237f00e428c4884b23eb47e9157798a541699732401e130c552f127ee not found: ID does not exist" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.608449 4747 scope.go:117] "RemoveContainer" containerID="b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.608609 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d"} err="failed to get container status \"b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d\": rpc error: code = NotFound desc = could not find container \"b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d\": container with ID starting with b4ac9de3a921529603da70c118731f1222c623b2bcccede0851c12d90167077d not found: ID does not exist" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.616482 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.722713 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " 
pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.722806 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/ce3a717a-3933-4866-8d52-63c2b160db8f-etc-iscsi\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.722845 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce3a717a-3933-4866-8d52-63c2b160db8f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.722880 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.722937 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ce3a717a-3933-4866-8d52-63c2b160db8f-lib-modules\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.722973 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/ce3a717a-3933-4866-8d52-63c2b160db8f-dev\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.723014 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/ce3a717a-3933-4866-8d52-63c2b160db8f-sys\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.723130 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/ce3a717a-3933-4866-8d52-63c2b160db8f-run\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.723192 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ce3a717a-3933-4866-8d52-63c2b160db8f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.723288 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce3a717a-3933-4866-8d52-63c2b160db8f-logs\") pod 
\"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.723348 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4txqb\" (UniqueName: \"kubernetes.io/projected/ce3a717a-3933-4866-8d52-63c2b160db8f-kube-api-access-4txqb\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.723419 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce3a717a-3933-4866-8d52-63c2b160db8f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.723470 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/ce3a717a-3933-4866-8d52-63c2b160db8f-etc-nvme\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.723542 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/ce3a717a-3933-4866-8d52-63c2b160db8f-var-locks-brick\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.825878 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/ce3a717a-3933-4866-8d52-63c2b160db8f-etc-iscsi\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.825961 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce3a717a-3933-4866-8d52-63c2b160db8f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.826001 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.826064 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ce3a717a-3933-4866-8d52-63c2b160db8f-lib-modules\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.826105 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: 
\"kubernetes.io/host-path/ce3a717a-3933-4866-8d52-63c2b160db8f-dev\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.826140 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/ce3a717a-3933-4866-8d52-63c2b160db8f-sys\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.826186 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/ce3a717a-3933-4866-8d52-63c2b160db8f-run\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.826220 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ce3a717a-3933-4866-8d52-63c2b160db8f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.826264 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce3a717a-3933-4866-8d52-63c2b160db8f-logs\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.826311 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4txqb\" (UniqueName: \"kubernetes.io/projected/ce3a717a-3933-4866-8d52-63c2b160db8f-kube-api-access-4txqb\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.826359 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce3a717a-3933-4866-8d52-63c2b160db8f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.826401 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/ce3a717a-3933-4866-8d52-63c2b160db8f-etc-nvme\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.826450 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/ce3a717a-3933-4866-8d52-63c2b160db8f-var-locks-brick\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.826579 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.827156 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") device mount path \"/mnt/openstack/pv09\"" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.828064 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/ce3a717a-3933-4866-8d52-63c2b160db8f-run\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.828110 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/ce3a717a-3933-4866-8d52-63c2b160db8f-etc-nvme\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.828178 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/ce3a717a-3933-4866-8d52-63c2b160db8f-lib-modules\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.828195 4747 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") device mount path \"/mnt/openstack/pv05\"" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.828242 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/ce3a717a-3933-4866-8d52-63c2b160db8f-etc-iscsi\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.828251 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/ce3a717a-3933-4866-8d52-63c2b160db8f-sys\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.828326 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/ce3a717a-3933-4866-8d52-63c2b160db8f-var-locks-brick\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.828357 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/ce3a717a-3933-4866-8d52-63c2b160db8f-dev\") pod 
\"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.830154 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ce3a717a-3933-4866-8d52-63c2b160db8f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.830522 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce3a717a-3933-4866-8d52-63c2b160db8f-logs\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.846237 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce3a717a-3933-4866-8d52-63c2b160db8f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.857840 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce3a717a-3933-4866-8d52-63c2b160db8f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.862400 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4txqb\" (UniqueName: \"kubernetes.io/projected/ce3a717a-3933-4866-8d52-63c2b160db8f-kube-api-access-4txqb\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.873946 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:03 crc kubenswrapper[4747]: I1001 06:37:03.931412 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-internal-api-0\" (UID: \"ce3a717a-3933-4866-8d52-63c2b160db8f\") " pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:04 crc kubenswrapper[4747]: I1001 06:37:04.217744 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:04 crc kubenswrapper[4747]: I1001 06:37:04.731499 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/glance-default-internal-api-0"] Oct 01 06:37:04 crc kubenswrapper[4747]: W1001 06:37:04.737664 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce3a717a_3933_4866_8d52_63c2b160db8f.slice/crio-8c8c7731ab29c6488ff9527b37dac8cf41e5803e0593b0547d3d3fa782aac001 WatchSource:0}: Error finding container 8c8c7731ab29c6488ff9527b37dac8cf41e5803e0593b0547d3d3fa782aac001: Status 404 returned error can't find the container with id 8c8c7731ab29c6488ff9527b37dac8cf41e5803e0593b0547d3d3fa782aac001 Oct 01 06:37:05 crc kubenswrapper[4747]: I1001 06:37:05.294362 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40c41037-ede7-4391-88d5-21c6724de957" path="/var/lib/kubelet/pods/40c41037-ede7-4391-88d5-21c6724de957/volumes" Oct 01 06:37:05 crc kubenswrapper[4747]: I1001 06:37:05.525124 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"ce3a717a-3933-4866-8d52-63c2b160db8f","Type":"ContainerStarted","Data":"5d891ceb299080df5c3d0f2d4e5d3c4ece1a98d69dd22d3efaf63a696d6dcc07"} Oct 01 06:37:05 crc kubenswrapper[4747]: I1001 06:37:05.525172 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"ce3a717a-3933-4866-8d52-63c2b160db8f","Type":"ContainerStarted","Data":"d8c61fa63dd79be1eeda50bee30feceef06d3dc97a0b25cc322422622f303d4c"} Oct 01 06:37:05 crc kubenswrapper[4747]: I1001 06:37:05.525183 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"ce3a717a-3933-4866-8d52-63c2b160db8f","Type":"ContainerStarted","Data":"184d57e8f3dde1b168438cfda4665959cd1dea1adf87c43c1ce0af361034c288"} Oct 01 06:37:05 crc kubenswrapper[4747]: I1001 06:37:05.525191 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/glance-default-internal-api-0" event={"ID":"ce3a717a-3933-4866-8d52-63c2b160db8f","Type":"ContainerStarted","Data":"8c8c7731ab29c6488ff9527b37dac8cf41e5803e0593b0547d3d3fa782aac001"} Oct 01 06:37:05 crc kubenswrapper[4747]: I1001 06:37:05.562968 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=2.5629418 podStartE2EDuration="2.5629418s" podCreationTimestamp="2025-10-01 06:37:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:37:05.559934356 +0000 UTC m=+1226.969591405" watchObservedRunningTime="2025-10-01 06:37:05.5629418 +0000 UTC m=+1226.972598869" Oct 01 06:37:05 crc kubenswrapper[4747]: I1001 06:37:05.761628 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:37:05 crc kubenswrapper[4747]: I1001 06:37:05.762004 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:37:11 crc kubenswrapper[4747]: I1001 06:37:11.859075 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:11 crc kubenswrapper[4747]: I1001 06:37:11.859905 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:11 crc kubenswrapper[4747]: I1001 06:37:11.860028 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:11 crc kubenswrapper[4747]: I1001 06:37:11.900969 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:11 crc kubenswrapper[4747]: I1001 06:37:11.902429 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:11 crc kubenswrapper[4747]: I1001 06:37:11.932075 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:12 crc kubenswrapper[4747]: I1001 06:37:12.609576 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:12 crc kubenswrapper[4747]: I1001 06:37:12.609957 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:12 crc kubenswrapper[4747]: I1001 06:37:12.609977 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:12 crc kubenswrapper[4747]: I1001 06:37:12.625925 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:12 crc kubenswrapper[4747]: I1001 06:37:12.630418 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:12 crc kubenswrapper[4747]: I1001 06:37:12.630679 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-external-api-0" Oct 01 06:37:14 crc kubenswrapper[4747]: I1001 06:37:14.219500 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:14 crc kubenswrapper[4747]: I1001 06:37:14.220254 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:14 crc kubenswrapper[4747]: I1001 06:37:14.220318 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:14 crc kubenswrapper[4747]: I1001 06:37:14.256325 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:14 crc kubenswrapper[4747]: I1001 06:37:14.268263 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:14 crc kubenswrapper[4747]: I1001 06:37:14.294933 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:14 crc kubenswrapper[4747]: I1001 06:37:14.632872 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:14 crc kubenswrapper[4747]: I1001 06:37:14.632920 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:14 crc kubenswrapper[4747]: I1001 06:37:14.632936 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:14 crc kubenswrapper[4747]: I1001 06:37:14.647309 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:14 crc kubenswrapper[4747]: I1001 06:37:14.651149 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:14 crc kubenswrapper[4747]: I1001 06:37:14.651399 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="glance-kuttl-tests/glance-default-internal-api-0" Oct 01 06:37:35 crc kubenswrapper[4747]: I1001 06:37:35.761218 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:37:35 crc kubenswrapper[4747]: I1001 06:37:35.761809 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:37:35 crc kubenswrapper[4747]: I1001 06:37:35.761872 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:37:35 crc kubenswrapper[4747]: I1001 06:37:35.762591 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f2895e753844a4a5cdf39762ff3e165f0014207b47172cc667faca0e96eb7319"} pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 06:37:35 crc kubenswrapper[4747]: I1001 06:37:35.762661 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" containerID="cri-o://f2895e753844a4a5cdf39762ff3e165f0014207b47172cc667faca0e96eb7319" gracePeriod=600 Oct 01 06:37:36 crc kubenswrapper[4747]: I1001 06:37:36.888941 4747 generic.go:334] "Generic (PLEG): container finished" podID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerID="f2895e753844a4a5cdf39762ff3e165f0014207b47172cc667faca0e96eb7319" exitCode=0 Oct 01 06:37:36 crc kubenswrapper[4747]: I1001 06:37:36.888999 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" 
event={"ID":"90df9e29-7482-4ab7-84c6-f3029df17a0d","Type":"ContainerDied","Data":"f2895e753844a4a5cdf39762ff3e165f0014207b47172cc667faca0e96eb7319"} Oct 01 06:37:36 crc kubenswrapper[4747]: I1001 06:37:36.889489 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" event={"ID":"90df9e29-7482-4ab7-84c6-f3029df17a0d","Type":"ContainerStarted","Data":"401ec113c485c56e663feb5d9e61759b53e4a33a866f086f08fbca2a246a142f"} Oct 01 06:37:36 crc kubenswrapper[4747]: I1001 06:37:36.889532 4747 scope.go:117] "RemoveContainer" containerID="c7fef4e888b7d3576c874bccbc790853fe54a02c39edee04d55581bad2028dc9" Oct 01 06:39:58 crc kubenswrapper[4747]: I1001 06:39:58.158845 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-n9qlq"] Oct 01 06:39:58 crc kubenswrapper[4747]: I1001 06:39:58.166901 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n9qlq" Oct 01 06:39:58 crc kubenswrapper[4747]: I1001 06:39:58.169508 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n9qlq"] Oct 01 06:39:58 crc kubenswrapper[4747]: I1001 06:39:58.247631 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd667810-e3fb-4e61-8309-5e68e6e4ba61-catalog-content\") pod \"redhat-marketplace-n9qlq\" (UID: \"bd667810-e3fb-4e61-8309-5e68e6e4ba61\") " pod="openshift-marketplace/redhat-marketplace-n9qlq" Oct 01 06:39:58 crc kubenswrapper[4747]: I1001 06:39:58.248188 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9m2vc\" (UniqueName: \"kubernetes.io/projected/bd667810-e3fb-4e61-8309-5e68e6e4ba61-kube-api-access-9m2vc\") pod \"redhat-marketplace-n9qlq\" (UID: \"bd667810-e3fb-4e61-8309-5e68e6e4ba61\") " pod="openshift-marketplace/redhat-marketplace-n9qlq" Oct 01 06:39:58 crc kubenswrapper[4747]: I1001 06:39:58.248220 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd667810-e3fb-4e61-8309-5e68e6e4ba61-utilities\") pod \"redhat-marketplace-n9qlq\" (UID: \"bd667810-e3fb-4e61-8309-5e68e6e4ba61\") " pod="openshift-marketplace/redhat-marketplace-n9qlq" Oct 01 06:39:58 crc kubenswrapper[4747]: I1001 06:39:58.350175 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd667810-e3fb-4e61-8309-5e68e6e4ba61-catalog-content\") pod \"redhat-marketplace-n9qlq\" (UID: \"bd667810-e3fb-4e61-8309-5e68e6e4ba61\") " pod="openshift-marketplace/redhat-marketplace-n9qlq" Oct 01 06:39:58 crc kubenswrapper[4747]: I1001 06:39:58.350305 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9m2vc\" (UniqueName: \"kubernetes.io/projected/bd667810-e3fb-4e61-8309-5e68e6e4ba61-kube-api-access-9m2vc\") pod \"redhat-marketplace-n9qlq\" (UID: \"bd667810-e3fb-4e61-8309-5e68e6e4ba61\") " pod="openshift-marketplace/redhat-marketplace-n9qlq" Oct 01 06:39:58 crc kubenswrapper[4747]: I1001 06:39:58.350519 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd667810-e3fb-4e61-8309-5e68e6e4ba61-utilities\") pod \"redhat-marketplace-n9qlq\" (UID: 
\"bd667810-e3fb-4e61-8309-5e68e6e4ba61\") " pod="openshift-marketplace/redhat-marketplace-n9qlq" Oct 01 06:39:58 crc kubenswrapper[4747]: I1001 06:39:58.351029 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd667810-e3fb-4e61-8309-5e68e6e4ba61-catalog-content\") pod \"redhat-marketplace-n9qlq\" (UID: \"bd667810-e3fb-4e61-8309-5e68e6e4ba61\") " pod="openshift-marketplace/redhat-marketplace-n9qlq" Oct 01 06:39:58 crc kubenswrapper[4747]: I1001 06:39:58.351385 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd667810-e3fb-4e61-8309-5e68e6e4ba61-utilities\") pod \"redhat-marketplace-n9qlq\" (UID: \"bd667810-e3fb-4e61-8309-5e68e6e4ba61\") " pod="openshift-marketplace/redhat-marketplace-n9qlq" Oct 01 06:39:58 crc kubenswrapper[4747]: I1001 06:39:58.370805 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9m2vc\" (UniqueName: \"kubernetes.io/projected/bd667810-e3fb-4e61-8309-5e68e6e4ba61-kube-api-access-9m2vc\") pod \"redhat-marketplace-n9qlq\" (UID: \"bd667810-e3fb-4e61-8309-5e68e6e4ba61\") " pod="openshift-marketplace/redhat-marketplace-n9qlq" Oct 01 06:39:58 crc kubenswrapper[4747]: I1001 06:39:58.497359 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n9qlq" Oct 01 06:39:58 crc kubenswrapper[4747]: I1001 06:39:58.706028 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n9qlq"] Oct 01 06:39:59 crc kubenswrapper[4747]: I1001 06:39:59.396896 4747 generic.go:334] "Generic (PLEG): container finished" podID="bd667810-e3fb-4e61-8309-5e68e6e4ba61" containerID="0124fe7acbefca1780cf22f407235e1f25d341246005ba50b86e95b2cd0bea20" exitCode=0 Oct 01 06:39:59 crc kubenswrapper[4747]: I1001 06:39:59.397166 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n9qlq" event={"ID":"bd667810-e3fb-4e61-8309-5e68e6e4ba61","Type":"ContainerDied","Data":"0124fe7acbefca1780cf22f407235e1f25d341246005ba50b86e95b2cd0bea20"} Oct 01 06:39:59 crc kubenswrapper[4747]: I1001 06:39:59.397201 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n9qlq" event={"ID":"bd667810-e3fb-4e61-8309-5e68e6e4ba61","Type":"ContainerStarted","Data":"f500fa606959edc76cd08e6b5e8c2e5df5df919613e38555d49d3d604cbf3939"} Oct 01 06:39:59 crc kubenswrapper[4747]: I1001 06:39:59.398602 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 06:40:00 crc kubenswrapper[4747]: I1001 06:40:00.411838 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n9qlq" event={"ID":"bd667810-e3fb-4e61-8309-5e68e6e4ba61","Type":"ContainerStarted","Data":"036f0a597890efc17d43dcefd75fc005f6e33be028a1f8f135583378a01e0ba7"} Oct 01 06:40:01 crc kubenswrapper[4747]: I1001 06:40:01.423544 4747 generic.go:334] "Generic (PLEG): container finished" podID="bd667810-e3fb-4e61-8309-5e68e6e4ba61" containerID="036f0a597890efc17d43dcefd75fc005f6e33be028a1f8f135583378a01e0ba7" exitCode=0 Oct 01 06:40:01 crc kubenswrapper[4747]: I1001 06:40:01.423604 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n9qlq" 
event={"ID":"bd667810-e3fb-4e61-8309-5e68e6e4ba61","Type":"ContainerDied","Data":"036f0a597890efc17d43dcefd75fc005f6e33be028a1f8f135583378a01e0ba7"} Oct 01 06:40:02 crc kubenswrapper[4747]: I1001 06:40:02.431861 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n9qlq" event={"ID":"bd667810-e3fb-4e61-8309-5e68e6e4ba61","Type":"ContainerStarted","Data":"7d7a1d19b9b76d00a0a84c3b543310f6cb766e699a412ec9d55034105e396bc4"} Oct 01 06:40:02 crc kubenswrapper[4747]: I1001 06:40:02.460485 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-n9qlq" podStartSLOduration=1.794423289 podStartE2EDuration="4.460464217s" podCreationTimestamp="2025-10-01 06:39:58 +0000 UTC" firstStartedPulling="2025-10-01 06:39:59.398343924 +0000 UTC m=+1400.808000983" lastFinishedPulling="2025-10-01 06:40:02.064384832 +0000 UTC m=+1403.474041911" observedRunningTime="2025-10-01 06:40:02.45361248 +0000 UTC m=+1403.863269549" watchObservedRunningTime="2025-10-01 06:40:02.460464217 +0000 UTC m=+1403.870121266" Oct 01 06:40:05 crc kubenswrapper[4747]: I1001 06:40:05.761480 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:40:05 crc kubenswrapper[4747]: I1001 06:40:05.762046 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:40:08 crc kubenswrapper[4747]: I1001 06:40:08.498424 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-n9qlq" Oct 01 06:40:08 crc kubenswrapper[4747]: I1001 06:40:08.500071 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-n9qlq" Oct 01 06:40:08 crc kubenswrapper[4747]: I1001 06:40:08.586010 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-n9qlq" Oct 01 06:40:09 crc kubenswrapper[4747]: I1001 06:40:09.574474 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-n9qlq" Oct 01 06:40:09 crc kubenswrapper[4747]: I1001 06:40:09.657055 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n9qlq"] Oct 01 06:40:11 crc kubenswrapper[4747]: I1001 06:40:11.253692 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ztsqt"] Oct 01 06:40:11 crc kubenswrapper[4747]: I1001 06:40:11.256040 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ztsqt" Oct 01 06:40:11 crc kubenswrapper[4747]: I1001 06:40:11.262115 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ztsqt"] Oct 01 06:40:11 crc kubenswrapper[4747]: I1001 06:40:11.389329 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8e66afb-d831-4980-9278-f50e0c78c8e9-utilities\") pod \"community-operators-ztsqt\" (UID: \"d8e66afb-d831-4980-9278-f50e0c78c8e9\") " pod="openshift-marketplace/community-operators-ztsqt" Oct 01 06:40:11 crc kubenswrapper[4747]: I1001 06:40:11.389402 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxvzb\" (UniqueName: \"kubernetes.io/projected/d8e66afb-d831-4980-9278-f50e0c78c8e9-kube-api-access-wxvzb\") pod \"community-operators-ztsqt\" (UID: \"d8e66afb-d831-4980-9278-f50e0c78c8e9\") " pod="openshift-marketplace/community-operators-ztsqt" Oct 01 06:40:11 crc kubenswrapper[4747]: I1001 06:40:11.389434 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8e66afb-d831-4980-9278-f50e0c78c8e9-catalog-content\") pod \"community-operators-ztsqt\" (UID: \"d8e66afb-d831-4980-9278-f50e0c78c8e9\") " pod="openshift-marketplace/community-operators-ztsqt" Oct 01 06:40:11 crc kubenswrapper[4747]: I1001 06:40:11.490599 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8e66afb-d831-4980-9278-f50e0c78c8e9-utilities\") pod \"community-operators-ztsqt\" (UID: \"d8e66afb-d831-4980-9278-f50e0c78c8e9\") " pod="openshift-marketplace/community-operators-ztsqt" Oct 01 06:40:11 crc kubenswrapper[4747]: I1001 06:40:11.490724 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxvzb\" (UniqueName: \"kubernetes.io/projected/d8e66afb-d831-4980-9278-f50e0c78c8e9-kube-api-access-wxvzb\") pod \"community-operators-ztsqt\" (UID: \"d8e66afb-d831-4980-9278-f50e0c78c8e9\") " pod="openshift-marketplace/community-operators-ztsqt" Oct 01 06:40:11 crc kubenswrapper[4747]: I1001 06:40:11.490792 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8e66afb-d831-4980-9278-f50e0c78c8e9-catalog-content\") pod \"community-operators-ztsqt\" (UID: \"d8e66afb-d831-4980-9278-f50e0c78c8e9\") " pod="openshift-marketplace/community-operators-ztsqt" Oct 01 06:40:11 crc kubenswrapper[4747]: I1001 06:40:11.491294 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8e66afb-d831-4980-9278-f50e0c78c8e9-utilities\") pod \"community-operators-ztsqt\" (UID: \"d8e66afb-d831-4980-9278-f50e0c78c8e9\") " pod="openshift-marketplace/community-operators-ztsqt" Oct 01 06:40:11 crc kubenswrapper[4747]: I1001 06:40:11.491339 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8e66afb-d831-4980-9278-f50e0c78c8e9-catalog-content\") pod \"community-operators-ztsqt\" (UID: \"d8e66afb-d831-4980-9278-f50e0c78c8e9\") " pod="openshift-marketplace/community-operators-ztsqt" Oct 01 06:40:11 crc kubenswrapper[4747]: I1001 06:40:11.514351 4747 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wxvzb\" (UniqueName: \"kubernetes.io/projected/d8e66afb-d831-4980-9278-f50e0c78c8e9-kube-api-access-wxvzb\") pod \"community-operators-ztsqt\" (UID: \"d8e66afb-d831-4980-9278-f50e0c78c8e9\") " pod="openshift-marketplace/community-operators-ztsqt" Oct 01 06:40:11 crc kubenswrapper[4747]: I1001 06:40:11.523597 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-n9qlq" podUID="bd667810-e3fb-4e61-8309-5e68e6e4ba61" containerName="registry-server" containerID="cri-o://7d7a1d19b9b76d00a0a84c3b543310f6cb766e699a412ec9d55034105e396bc4" gracePeriod=2 Oct 01 06:40:11 crc kubenswrapper[4747]: I1001 06:40:11.580502 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ztsqt" Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.013239 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n9qlq" Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.066056 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ztsqt"] Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.100396 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd667810-e3fb-4e61-8309-5e68e6e4ba61-catalog-content\") pod \"bd667810-e3fb-4e61-8309-5e68e6e4ba61\" (UID: \"bd667810-e3fb-4e61-8309-5e68e6e4ba61\") " Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.106999 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9m2vc\" (UniqueName: \"kubernetes.io/projected/bd667810-e3fb-4e61-8309-5e68e6e4ba61-kube-api-access-9m2vc\") pod \"bd667810-e3fb-4e61-8309-5e68e6e4ba61\" (UID: \"bd667810-e3fb-4e61-8309-5e68e6e4ba61\") " Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.107119 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd667810-e3fb-4e61-8309-5e68e6e4ba61-utilities\") pod \"bd667810-e3fb-4e61-8309-5e68e6e4ba61\" (UID: \"bd667810-e3fb-4e61-8309-5e68e6e4ba61\") " Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.107946 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd667810-e3fb-4e61-8309-5e68e6e4ba61-utilities" (OuterVolumeSpecName: "utilities") pod "bd667810-e3fb-4e61-8309-5e68e6e4ba61" (UID: "bd667810-e3fb-4e61-8309-5e68e6e4ba61"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.108138 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd667810-e3fb-4e61-8309-5e68e6e4ba61-kube-api-access-9m2vc" (OuterVolumeSpecName: "kube-api-access-9m2vc") pod "bd667810-e3fb-4e61-8309-5e68e6e4ba61" (UID: "bd667810-e3fb-4e61-8309-5e68e6e4ba61"). InnerVolumeSpecName "kube-api-access-9m2vc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.113082 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd667810-e3fb-4e61-8309-5e68e6e4ba61-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bd667810-e3fb-4e61-8309-5e68e6e4ba61" (UID: "bd667810-e3fb-4e61-8309-5e68e6e4ba61"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.209083 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd667810-e3fb-4e61-8309-5e68e6e4ba61-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.209145 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9m2vc\" (UniqueName: \"kubernetes.io/projected/bd667810-e3fb-4e61-8309-5e68e6e4ba61-kube-api-access-9m2vc\") on node \"crc\" DevicePath \"\"" Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.209172 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd667810-e3fb-4e61-8309-5e68e6e4ba61-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.538327 4747 generic.go:334] "Generic (PLEG): container finished" podID="bd667810-e3fb-4e61-8309-5e68e6e4ba61" containerID="7d7a1d19b9b76d00a0a84c3b543310f6cb766e699a412ec9d55034105e396bc4" exitCode=0 Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.538423 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n9qlq" event={"ID":"bd667810-e3fb-4e61-8309-5e68e6e4ba61","Type":"ContainerDied","Data":"7d7a1d19b9b76d00a0a84c3b543310f6cb766e699a412ec9d55034105e396bc4"} Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.538463 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n9qlq" event={"ID":"bd667810-e3fb-4e61-8309-5e68e6e4ba61","Type":"ContainerDied","Data":"f500fa606959edc76cd08e6b5e8c2e5df5df919613e38555d49d3d604cbf3939"} Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.538491 4747 scope.go:117] "RemoveContainer" containerID="7d7a1d19b9b76d00a0a84c3b543310f6cb766e699a412ec9d55034105e396bc4" Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.538651 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n9qlq" Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.542140 4747 generic.go:334] "Generic (PLEG): container finished" podID="d8e66afb-d831-4980-9278-f50e0c78c8e9" containerID="da65c19fe36d7079d3b888b7682bbd730e61ef777ba453017ac1ad584836d7d7" exitCode=0 Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.542182 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ztsqt" event={"ID":"d8e66afb-d831-4980-9278-f50e0c78c8e9","Type":"ContainerDied","Data":"da65c19fe36d7079d3b888b7682bbd730e61ef777ba453017ac1ad584836d7d7"} Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.542216 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ztsqt" event={"ID":"d8e66afb-d831-4980-9278-f50e0c78c8e9","Type":"ContainerStarted","Data":"c2be64c537e3020e6d5195903b99eb61c56be5a88fed90578d7ecd130fec30b9"} Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.568095 4747 scope.go:117] "RemoveContainer" containerID="036f0a597890efc17d43dcefd75fc005f6e33be028a1f8f135583378a01e0ba7" Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.606564 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n9qlq"] Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.606692 4747 scope.go:117] "RemoveContainer" containerID="0124fe7acbefca1780cf22f407235e1f25d341246005ba50b86e95b2cd0bea20" Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.624115 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-n9qlq"] Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.629085 4747 scope.go:117] "RemoveContainer" containerID="7d7a1d19b9b76d00a0a84c3b543310f6cb766e699a412ec9d55034105e396bc4" Oct 01 06:40:12 crc kubenswrapper[4747]: E1001 06:40:12.629513 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d7a1d19b9b76d00a0a84c3b543310f6cb766e699a412ec9d55034105e396bc4\": container with ID starting with 7d7a1d19b9b76d00a0a84c3b543310f6cb766e699a412ec9d55034105e396bc4 not found: ID does not exist" containerID="7d7a1d19b9b76d00a0a84c3b543310f6cb766e699a412ec9d55034105e396bc4" Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.629550 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d7a1d19b9b76d00a0a84c3b543310f6cb766e699a412ec9d55034105e396bc4"} err="failed to get container status \"7d7a1d19b9b76d00a0a84c3b543310f6cb766e699a412ec9d55034105e396bc4\": rpc error: code = NotFound desc = could not find container \"7d7a1d19b9b76d00a0a84c3b543310f6cb766e699a412ec9d55034105e396bc4\": container with ID starting with 7d7a1d19b9b76d00a0a84c3b543310f6cb766e699a412ec9d55034105e396bc4 not found: ID does not exist" Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.629577 4747 scope.go:117] "RemoveContainer" containerID="036f0a597890efc17d43dcefd75fc005f6e33be028a1f8f135583378a01e0ba7" Oct 01 06:40:12 crc kubenswrapper[4747]: E1001 06:40:12.630082 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"036f0a597890efc17d43dcefd75fc005f6e33be028a1f8f135583378a01e0ba7\": container with ID starting with 036f0a597890efc17d43dcefd75fc005f6e33be028a1f8f135583378a01e0ba7 not found: ID does not exist" 
containerID="036f0a597890efc17d43dcefd75fc005f6e33be028a1f8f135583378a01e0ba7" Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.630110 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"036f0a597890efc17d43dcefd75fc005f6e33be028a1f8f135583378a01e0ba7"} err="failed to get container status \"036f0a597890efc17d43dcefd75fc005f6e33be028a1f8f135583378a01e0ba7\": rpc error: code = NotFound desc = could not find container \"036f0a597890efc17d43dcefd75fc005f6e33be028a1f8f135583378a01e0ba7\": container with ID starting with 036f0a597890efc17d43dcefd75fc005f6e33be028a1f8f135583378a01e0ba7 not found: ID does not exist" Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.630127 4747 scope.go:117] "RemoveContainer" containerID="0124fe7acbefca1780cf22f407235e1f25d341246005ba50b86e95b2cd0bea20" Oct 01 06:40:12 crc kubenswrapper[4747]: E1001 06:40:12.630431 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0124fe7acbefca1780cf22f407235e1f25d341246005ba50b86e95b2cd0bea20\": container with ID starting with 0124fe7acbefca1780cf22f407235e1f25d341246005ba50b86e95b2cd0bea20 not found: ID does not exist" containerID="0124fe7acbefca1780cf22f407235e1f25d341246005ba50b86e95b2cd0bea20" Oct 01 06:40:12 crc kubenswrapper[4747]: I1001 06:40:12.630451 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0124fe7acbefca1780cf22f407235e1f25d341246005ba50b86e95b2cd0bea20"} err="failed to get container status \"0124fe7acbefca1780cf22f407235e1f25d341246005ba50b86e95b2cd0bea20\": rpc error: code = NotFound desc = could not find container \"0124fe7acbefca1780cf22f407235e1f25d341246005ba50b86e95b2cd0bea20\": container with ID starting with 0124fe7acbefca1780cf22f407235e1f25d341246005ba50b86e95b2cd0bea20 not found: ID does not exist" Oct 01 06:40:13 crc kubenswrapper[4747]: I1001 06:40:13.295340 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd667810-e3fb-4e61-8309-5e68e6e4ba61" path="/var/lib/kubelet/pods/bd667810-e3fb-4e61-8309-5e68e6e4ba61/volumes" Oct 01 06:40:13 crc kubenswrapper[4747]: I1001 06:40:13.554862 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ztsqt" event={"ID":"d8e66afb-d831-4980-9278-f50e0c78c8e9","Type":"ContainerStarted","Data":"a11209c109beff0c54f19a59a43282787c9ce32f1ac46fe480a322d27ae5e423"} Oct 01 06:40:14 crc kubenswrapper[4747]: I1001 06:40:14.569887 4747 generic.go:334] "Generic (PLEG): container finished" podID="d8e66afb-d831-4980-9278-f50e0c78c8e9" containerID="a11209c109beff0c54f19a59a43282787c9ce32f1ac46fe480a322d27ae5e423" exitCode=0 Oct 01 06:40:14 crc kubenswrapper[4747]: I1001 06:40:14.570464 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ztsqt" event={"ID":"d8e66afb-d831-4980-9278-f50e0c78c8e9","Type":"ContainerDied","Data":"a11209c109beff0c54f19a59a43282787c9ce32f1ac46fe480a322d27ae5e423"} Oct 01 06:40:15 crc kubenswrapper[4747]: I1001 06:40:15.584500 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ztsqt" event={"ID":"d8e66afb-d831-4980-9278-f50e0c78c8e9","Type":"ContainerStarted","Data":"6fda1d7ddcced5985c0d65abdaecbf4a00a74c8c27a33695f1b8a93658bd40f7"} Oct 01 06:40:15 crc kubenswrapper[4747]: I1001 06:40:15.614358 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/community-operators-ztsqt" podStartSLOduration=1.936590698 podStartE2EDuration="4.614338411s" podCreationTimestamp="2025-10-01 06:40:11 +0000 UTC" firstStartedPulling="2025-10-01 06:40:12.544595432 +0000 UTC m=+1413.954252501" lastFinishedPulling="2025-10-01 06:40:15.222343135 +0000 UTC m=+1416.632000214" observedRunningTime="2025-10-01 06:40:15.60901946 +0000 UTC m=+1417.018676549" watchObservedRunningTime="2025-10-01 06:40:15.614338411 +0000 UTC m=+1417.023995470" Oct 01 06:40:21 crc kubenswrapper[4747]: I1001 06:40:21.581561 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ztsqt" Oct 01 06:40:21 crc kubenswrapper[4747]: I1001 06:40:21.582230 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ztsqt" Oct 01 06:40:21 crc kubenswrapper[4747]: I1001 06:40:21.663290 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ztsqt" Oct 01 06:40:21 crc kubenswrapper[4747]: I1001 06:40:21.756865 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ztsqt" Oct 01 06:40:21 crc kubenswrapper[4747]: I1001 06:40:21.919496 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ztsqt"] Oct 01 06:40:23 crc kubenswrapper[4747]: I1001 06:40:23.665153 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ztsqt" podUID="d8e66afb-d831-4980-9278-f50e0c78c8e9" containerName="registry-server" containerID="cri-o://6fda1d7ddcced5985c0d65abdaecbf4a00a74c8c27a33695f1b8a93658bd40f7" gracePeriod=2 Oct 01 06:40:24 crc kubenswrapper[4747]: I1001 06:40:24.188513 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ztsqt" Oct 01 06:40:24 crc kubenswrapper[4747]: I1001 06:40:24.291782 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxvzb\" (UniqueName: \"kubernetes.io/projected/d8e66afb-d831-4980-9278-f50e0c78c8e9-kube-api-access-wxvzb\") pod \"d8e66afb-d831-4980-9278-f50e0c78c8e9\" (UID: \"d8e66afb-d831-4980-9278-f50e0c78c8e9\") " Oct 01 06:40:24 crc kubenswrapper[4747]: I1001 06:40:24.291846 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8e66afb-d831-4980-9278-f50e0c78c8e9-catalog-content\") pod \"d8e66afb-d831-4980-9278-f50e0c78c8e9\" (UID: \"d8e66afb-d831-4980-9278-f50e0c78c8e9\") " Oct 01 06:40:24 crc kubenswrapper[4747]: I1001 06:40:24.291996 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8e66afb-d831-4980-9278-f50e0c78c8e9-utilities\") pod \"d8e66afb-d831-4980-9278-f50e0c78c8e9\" (UID: \"d8e66afb-d831-4980-9278-f50e0c78c8e9\") " Oct 01 06:40:24 crc kubenswrapper[4747]: I1001 06:40:24.293149 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8e66afb-d831-4980-9278-f50e0c78c8e9-utilities" (OuterVolumeSpecName: "utilities") pod "d8e66afb-d831-4980-9278-f50e0c78c8e9" (UID: "d8e66afb-d831-4980-9278-f50e0c78c8e9"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:40:24 crc kubenswrapper[4747]: I1001 06:40:24.298242 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8e66afb-d831-4980-9278-f50e0c78c8e9-kube-api-access-wxvzb" (OuterVolumeSpecName: "kube-api-access-wxvzb") pod "d8e66afb-d831-4980-9278-f50e0c78c8e9" (UID: "d8e66afb-d831-4980-9278-f50e0c78c8e9"). InnerVolumeSpecName "kube-api-access-wxvzb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:40:24 crc kubenswrapper[4747]: I1001 06:40:24.394113 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8e66afb-d831-4980-9278-f50e0c78c8e9-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:40:24 crc kubenswrapper[4747]: I1001 06:40:24.394164 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxvzb\" (UniqueName: \"kubernetes.io/projected/d8e66afb-d831-4980-9278-f50e0c78c8e9-kube-api-access-wxvzb\") on node \"crc\" DevicePath \"\"" Oct 01 06:40:24 crc kubenswrapper[4747]: I1001 06:40:24.677299 4747 generic.go:334] "Generic (PLEG): container finished" podID="d8e66afb-d831-4980-9278-f50e0c78c8e9" containerID="6fda1d7ddcced5985c0d65abdaecbf4a00a74c8c27a33695f1b8a93658bd40f7" exitCode=0 Oct 01 06:40:24 crc kubenswrapper[4747]: I1001 06:40:24.677363 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ztsqt" event={"ID":"d8e66afb-d831-4980-9278-f50e0c78c8e9","Type":"ContainerDied","Data":"6fda1d7ddcced5985c0d65abdaecbf4a00a74c8c27a33695f1b8a93658bd40f7"} Oct 01 06:40:24 crc kubenswrapper[4747]: I1001 06:40:24.677406 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ztsqt" Oct 01 06:40:24 crc kubenswrapper[4747]: I1001 06:40:24.677432 4747 scope.go:117] "RemoveContainer" containerID="6fda1d7ddcced5985c0d65abdaecbf4a00a74c8c27a33695f1b8a93658bd40f7" Oct 01 06:40:24 crc kubenswrapper[4747]: I1001 06:40:24.677411 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ztsqt" event={"ID":"d8e66afb-d831-4980-9278-f50e0c78c8e9","Type":"ContainerDied","Data":"c2be64c537e3020e6d5195903b99eb61c56be5a88fed90578d7ecd130fec30b9"} Oct 01 06:40:24 crc kubenswrapper[4747]: I1001 06:40:24.704233 4747 scope.go:117] "RemoveContainer" containerID="a11209c109beff0c54f19a59a43282787c9ce32f1ac46fe480a322d27ae5e423" Oct 01 06:40:24 crc kubenswrapper[4747]: I1001 06:40:24.730894 4747 scope.go:117] "RemoveContainer" containerID="da65c19fe36d7079d3b888b7682bbd730e61ef777ba453017ac1ad584836d7d7" Oct 01 06:40:24 crc kubenswrapper[4747]: I1001 06:40:24.776778 4747 scope.go:117] "RemoveContainer" containerID="6fda1d7ddcced5985c0d65abdaecbf4a00a74c8c27a33695f1b8a93658bd40f7" Oct 01 06:40:24 crc kubenswrapper[4747]: E1001 06:40:24.777391 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fda1d7ddcced5985c0d65abdaecbf4a00a74c8c27a33695f1b8a93658bd40f7\": container with ID starting with 6fda1d7ddcced5985c0d65abdaecbf4a00a74c8c27a33695f1b8a93658bd40f7 not found: ID does not exist" containerID="6fda1d7ddcced5985c0d65abdaecbf4a00a74c8c27a33695f1b8a93658bd40f7" Oct 01 06:40:24 crc kubenswrapper[4747]: I1001 06:40:24.777458 4747 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"6fda1d7ddcced5985c0d65abdaecbf4a00a74c8c27a33695f1b8a93658bd40f7"} err="failed to get container status \"6fda1d7ddcced5985c0d65abdaecbf4a00a74c8c27a33695f1b8a93658bd40f7\": rpc error: code = NotFound desc = could not find container \"6fda1d7ddcced5985c0d65abdaecbf4a00a74c8c27a33695f1b8a93658bd40f7\": container with ID starting with 6fda1d7ddcced5985c0d65abdaecbf4a00a74c8c27a33695f1b8a93658bd40f7 not found: ID does not exist" Oct 01 06:40:24 crc kubenswrapper[4747]: I1001 06:40:24.777490 4747 scope.go:117] "RemoveContainer" containerID="a11209c109beff0c54f19a59a43282787c9ce32f1ac46fe480a322d27ae5e423" Oct 01 06:40:24 crc kubenswrapper[4747]: E1001 06:40:24.777941 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a11209c109beff0c54f19a59a43282787c9ce32f1ac46fe480a322d27ae5e423\": container with ID starting with a11209c109beff0c54f19a59a43282787c9ce32f1ac46fe480a322d27ae5e423 not found: ID does not exist" containerID="a11209c109beff0c54f19a59a43282787c9ce32f1ac46fe480a322d27ae5e423" Oct 01 06:40:24 crc kubenswrapper[4747]: I1001 06:40:24.777983 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a11209c109beff0c54f19a59a43282787c9ce32f1ac46fe480a322d27ae5e423"} err="failed to get container status \"a11209c109beff0c54f19a59a43282787c9ce32f1ac46fe480a322d27ae5e423\": rpc error: code = NotFound desc = could not find container \"a11209c109beff0c54f19a59a43282787c9ce32f1ac46fe480a322d27ae5e423\": container with ID starting with a11209c109beff0c54f19a59a43282787c9ce32f1ac46fe480a322d27ae5e423 not found: ID does not exist" Oct 01 06:40:24 crc kubenswrapper[4747]: I1001 06:40:24.778008 4747 scope.go:117] "RemoveContainer" containerID="da65c19fe36d7079d3b888b7682bbd730e61ef777ba453017ac1ad584836d7d7" Oct 01 06:40:24 crc kubenswrapper[4747]: E1001 06:40:24.778466 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da65c19fe36d7079d3b888b7682bbd730e61ef777ba453017ac1ad584836d7d7\": container with ID starting with da65c19fe36d7079d3b888b7682bbd730e61ef777ba453017ac1ad584836d7d7 not found: ID does not exist" containerID="da65c19fe36d7079d3b888b7682bbd730e61ef777ba453017ac1ad584836d7d7" Oct 01 06:40:24 crc kubenswrapper[4747]: I1001 06:40:24.778497 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da65c19fe36d7079d3b888b7682bbd730e61ef777ba453017ac1ad584836d7d7"} err="failed to get container status \"da65c19fe36d7079d3b888b7682bbd730e61ef777ba453017ac1ad584836d7d7\": rpc error: code = NotFound desc = could not find container \"da65c19fe36d7079d3b888b7682bbd730e61ef777ba453017ac1ad584836d7d7\": container with ID starting with da65c19fe36d7079d3b888b7682bbd730e61ef777ba453017ac1ad584836d7d7 not found: ID does not exist" Oct 01 06:40:25 crc kubenswrapper[4747]: I1001 06:40:25.025240 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8e66afb-d831-4980-9278-f50e0c78c8e9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d8e66afb-d831-4980-9278-f50e0c78c8e9" (UID: "d8e66afb-d831-4980-9278-f50e0c78c8e9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:40:25 crc kubenswrapper[4747]: I1001 06:40:25.105315 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8e66afb-d831-4980-9278-f50e0c78c8e9-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:40:25 crc kubenswrapper[4747]: I1001 06:40:25.335206 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ztsqt"] Oct 01 06:40:25 crc kubenswrapper[4747]: I1001 06:40:25.343720 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ztsqt"] Oct 01 06:40:27 crc kubenswrapper[4747]: I1001 06:40:27.291550 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8e66afb-d831-4980-9278-f50e0c78c8e9" path="/var/lib/kubelet/pods/d8e66afb-d831-4980-9278-f50e0c78c8e9/volumes" Oct 01 06:40:35 crc kubenswrapper[4747]: I1001 06:40:35.761726 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:40:35 crc kubenswrapper[4747]: I1001 06:40:35.762438 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:40:40 crc kubenswrapper[4747]: I1001 06:40:40.121070 4747 scope.go:117] "RemoveContainer" containerID="9bacb400e449c2887aa66bf6901b6e923d972b681eed78d474e9d646edb96ade" Oct 01 06:40:40 crc kubenswrapper[4747]: I1001 06:40:40.145532 4747 scope.go:117] "RemoveContainer" containerID="2abe0eb82b02c8683238645da1672e29d59cb001d74341bab44f69470f7ddaf3" Oct 01 06:40:40 crc kubenswrapper[4747]: I1001 06:40:40.208776 4747 scope.go:117] "RemoveContainer" containerID="c1056f3e321ccb923e89f694601b4c7ade2c541ced152d34b6947e4499b2802d" Oct 01 06:40:40 crc kubenswrapper[4747]: I1001 06:40:40.227802 4747 scope.go:117] "RemoveContainer" containerID="5ead492a2a2b059bd7da20b7ee096a5c9b6f117b8bfff6475b22c1167fe075cd" Oct 01 06:41:05 crc kubenswrapper[4747]: I1001 06:41:05.761539 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:41:05 crc kubenswrapper[4747]: I1001 06:41:05.762325 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:41:05 crc kubenswrapper[4747]: I1001 06:41:05.762414 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:41:05 crc kubenswrapper[4747]: I1001 06:41:05.763358 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"401ec113c485c56e663feb5d9e61759b53e4a33a866f086f08fbca2a246a142f"} pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 06:41:05 crc kubenswrapper[4747]: I1001 06:41:05.763459 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" containerID="cri-o://401ec113c485c56e663feb5d9e61759b53e4a33a866f086f08fbca2a246a142f" gracePeriod=600 Oct 01 06:41:06 crc kubenswrapper[4747]: I1001 06:41:06.152890 4747 generic.go:334] "Generic (PLEG): container finished" podID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerID="401ec113c485c56e663feb5d9e61759b53e4a33a866f086f08fbca2a246a142f" exitCode=0 Oct 01 06:41:06 crc kubenswrapper[4747]: I1001 06:41:06.152936 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" event={"ID":"90df9e29-7482-4ab7-84c6-f3029df17a0d","Type":"ContainerDied","Data":"401ec113c485c56e663feb5d9e61759b53e4a33a866f086f08fbca2a246a142f"} Oct 01 06:41:06 crc kubenswrapper[4747]: I1001 06:41:06.153250 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" event={"ID":"90df9e29-7482-4ab7-84c6-f3029df17a0d","Type":"ContainerStarted","Data":"8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03"} Oct 01 06:41:06 crc kubenswrapper[4747]: I1001 06:41:06.153271 4747 scope.go:117] "RemoveContainer" containerID="f2895e753844a4a5cdf39762ff3e165f0014207b47172cc667faca0e96eb7319" Oct 01 06:41:40 crc kubenswrapper[4747]: I1001 06:41:40.352096 4747 scope.go:117] "RemoveContainer" containerID="7c5dcbebb946d5becfdfb05069ee77616a38999601dd758631f2beb699bd1df1" Oct 01 06:41:40 crc kubenswrapper[4747]: I1001 06:41:40.394895 4747 scope.go:117] "RemoveContainer" containerID="2134368fbe4502166120232a8f72de91dc4a379c115c627e58e1556f8a423cef" Oct 01 06:42:10 crc kubenswrapper[4747]: I1001 06:42:10.068598 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/keystone-db-create-hc9dd"] Oct 01 06:42:10 crc kubenswrapper[4747]: I1001 06:42:10.078138 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/keystone-db-create-hc9dd"] Oct 01 06:42:11 crc kubenswrapper[4747]: I1001 06:42:11.290104 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a71ca0c7-a4a5-4006-ae5b-8061889dd77c" path="/var/lib/kubelet/pods/a71ca0c7-a4a5-4006-ae5b-8061889dd77c/volumes" Oct 01 06:42:20 crc kubenswrapper[4747]: I1001 06:42:20.028642 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/keystone-9204-account-create-d75b4"] Oct 01 06:42:20 crc kubenswrapper[4747]: I1001 06:42:20.043785 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/keystone-9204-account-create-d75b4"] Oct 01 06:42:21 crc kubenswrapper[4747]: I1001 06:42:21.293536 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d4143ef-6665-4852-89f3-735b1952cd3c" path="/var/lib/kubelet/pods/4d4143ef-6665-4852-89f3-735b1952cd3c/volumes" Oct 01 06:42:36 crc kubenswrapper[4747]: I1001 06:42:36.056737 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/keystone-db-sync-4wbfs"] Oct 01 06:42:36 crc kubenswrapper[4747]: I1001 06:42:36.069361 
4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/keystone-db-sync-4wbfs"] Oct 01 06:42:37 crc kubenswrapper[4747]: I1001 06:42:37.291827 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="274c38e7-1453-412a-9751-fd095df2157b" path="/var/lib/kubelet/pods/274c38e7-1453-412a-9751-fd095df2157b/volumes" Oct 01 06:42:40 crc kubenswrapper[4747]: I1001 06:42:40.455022 4747 scope.go:117] "RemoveContainer" containerID="423a8523ab448d99b2ca82a8173331cf13833fb1e8a0266a0d5d4dcc042a858f" Oct 01 06:42:40 crc kubenswrapper[4747]: I1001 06:42:40.489274 4747 scope.go:117] "RemoveContainer" containerID="88c8bd70fcef133d9effa0832e5bed9d94af164b8095c8e716dbd98f6ee556a6" Oct 01 06:42:40 crc kubenswrapper[4747]: I1001 06:42:40.544254 4747 scope.go:117] "RemoveContainer" containerID="e28bf6359fd12613cd7231a72a0f064b6ee2c120ba6b6d54eb56b88bad74a820" Oct 01 06:42:40 crc kubenswrapper[4747]: I1001 06:42:40.568788 4747 scope.go:117] "RemoveContainer" containerID="6506687389c71fc9bfa0d963c1a142e31e211dc4e1a46ad569fa14e32bc6c970" Oct 01 06:42:40 crc kubenswrapper[4747]: I1001 06:42:40.630957 4747 scope.go:117] "RemoveContainer" containerID="cd6f7b81ac82353b65be132c14fff1e89895469539f1fe6d8119dbbef93f4270" Oct 01 06:42:40 crc kubenswrapper[4747]: I1001 06:42:40.658646 4747 scope.go:117] "RemoveContainer" containerID="26f24683876d56b5f76470f21ee13444fb124e181f00ec7b01905f022e153ea4" Oct 01 06:42:40 crc kubenswrapper[4747]: I1001 06:42:40.709143 4747 scope.go:117] "RemoveContainer" containerID="79b3797b286723e514e5731f90237633dd8bac75e10f1253f848078bbc453b8d" Oct 01 06:42:42 crc kubenswrapper[4747]: I1001 06:42:42.030645 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/keystone-bootstrap-td5rr"] Oct 01 06:42:42 crc kubenswrapper[4747]: I1001 06:42:42.037787 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/keystone-bootstrap-td5rr"] Oct 01 06:42:43 crc kubenswrapper[4747]: I1001 06:42:43.292343 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b83dbf88-816b-49e0-932b-d31a5317a47d" path="/var/lib/kubelet/pods/b83dbf88-816b-49e0-932b-d31a5317a47d/volumes" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.631495 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["glance-kuttl-tests/openstackclient"] Oct 01 06:43:20 crc kubenswrapper[4747]: E1001 06:43:20.633034 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd667810-e3fb-4e61-8309-5e68e6e4ba61" containerName="registry-server" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.633070 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd667810-e3fb-4e61-8309-5e68e6e4ba61" containerName="registry-server" Oct 01 06:43:20 crc kubenswrapper[4747]: E1001 06:43:20.633144 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8e66afb-d831-4980-9278-f50e0c78c8e9" containerName="extract-content" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.633161 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8e66afb-d831-4980-9278-f50e0c78c8e9" containerName="extract-content" Oct 01 06:43:20 crc kubenswrapper[4747]: E1001 06:43:20.633196 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd667810-e3fb-4e61-8309-5e68e6e4ba61" containerName="extract-utilities" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.633218 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd667810-e3fb-4e61-8309-5e68e6e4ba61" containerName="extract-utilities" Oct 01 
06:43:20 crc kubenswrapper[4747]: E1001 06:43:20.633250 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8e66afb-d831-4980-9278-f50e0c78c8e9" containerName="extract-utilities" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.633266 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8e66afb-d831-4980-9278-f50e0c78c8e9" containerName="extract-utilities" Oct 01 06:43:20 crc kubenswrapper[4747]: E1001 06:43:20.633358 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd667810-e3fb-4e61-8309-5e68e6e4ba61" containerName="extract-content" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.633377 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd667810-e3fb-4e61-8309-5e68e6e4ba61" containerName="extract-content" Oct 01 06:43:20 crc kubenswrapper[4747]: E1001 06:43:20.633409 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8e66afb-d831-4980-9278-f50e0c78c8e9" containerName="registry-server" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.633427 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8e66afb-d831-4980-9278-f50e0c78c8e9" containerName="registry-server" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.633795 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd667810-e3fb-4e61-8309-5e68e6e4ba61" containerName="registry-server" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.633847 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8e66afb-d831-4980-9278-f50e0c78c8e9" containerName="registry-server" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.635002 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstackclient" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.639574 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"default-dockercfg-mvtsj" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.639685 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"openstack-scripts-9db6gc427h" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.642122 4747 reflector.go:368] Caches populated for *v1.Secret from object-"glance-kuttl-tests"/"openstack-config-secret" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.643432 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"glance-kuttl-tests"/"openstack-config" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.655309 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstackclient"] Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.839046 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/bb80c3ef-1c47-4549-a3b3-dd2e50612914-openstack-scripts\") pod \"openstackclient\" (UID: \"bb80c3ef-1c47-4549-a3b3-dd2e50612914\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.839209 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/bb80c3ef-1c47-4549-a3b3-dd2e50612914-openstack-config-secret\") pod \"openstackclient\" (UID: \"bb80c3ef-1c47-4549-a3b3-dd2e50612914\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.839357 4747 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/bb80c3ef-1c47-4549-a3b3-dd2e50612914-openstack-config\") pod \"openstackclient\" (UID: \"bb80c3ef-1c47-4549-a3b3-dd2e50612914\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.839435 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lwgt\" (UniqueName: \"kubernetes.io/projected/bb80c3ef-1c47-4549-a3b3-dd2e50612914-kube-api-access-7lwgt\") pod \"openstackclient\" (UID: \"bb80c3ef-1c47-4549-a3b3-dd2e50612914\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.940650 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/bb80c3ef-1c47-4549-a3b3-dd2e50612914-openstack-config-secret\") pod \"openstackclient\" (UID: \"bb80c3ef-1c47-4549-a3b3-dd2e50612914\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.940814 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/bb80c3ef-1c47-4549-a3b3-dd2e50612914-openstack-config\") pod \"openstackclient\" (UID: \"bb80c3ef-1c47-4549-a3b3-dd2e50612914\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.940876 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lwgt\" (UniqueName: \"kubernetes.io/projected/bb80c3ef-1c47-4549-a3b3-dd2e50612914-kube-api-access-7lwgt\") pod \"openstackclient\" (UID: \"bb80c3ef-1c47-4549-a3b3-dd2e50612914\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.940971 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/bb80c3ef-1c47-4549-a3b3-dd2e50612914-openstack-scripts\") pod \"openstackclient\" (UID: \"bb80c3ef-1c47-4549-a3b3-dd2e50612914\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.942589 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/bb80c3ef-1c47-4549-a3b3-dd2e50612914-openstack-config\") pod \"openstackclient\" (UID: \"bb80c3ef-1c47-4549-a3b3-dd2e50612914\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.942725 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-scripts\" (UniqueName: \"kubernetes.io/configmap/bb80c3ef-1c47-4549-a3b3-dd2e50612914-openstack-scripts\") pod \"openstackclient\" (UID: \"bb80c3ef-1c47-4549-a3b3-dd2e50612914\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.946994 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/bb80c3ef-1c47-4549-a3b3-dd2e50612914-openstack-config-secret\") pod \"openstackclient\" (UID: \"bb80c3ef-1c47-4549-a3b3-dd2e50612914\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.960073 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lwgt\" (UniqueName: 
\"kubernetes.io/projected/bb80c3ef-1c47-4549-a3b3-dd2e50612914-kube-api-access-7lwgt\") pod \"openstackclient\" (UID: \"bb80c3ef-1c47-4549-a3b3-dd2e50612914\") " pod="glance-kuttl-tests/openstackclient" Oct 01 06:43:20 crc kubenswrapper[4747]: I1001 06:43:20.968634 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="glance-kuttl-tests/openstackclient" Oct 01 06:43:21 crc kubenswrapper[4747]: I1001 06:43:21.421686 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["glance-kuttl-tests/openstackclient"] Oct 01 06:43:21 crc kubenswrapper[4747]: W1001 06:43:21.428448 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbb80c3ef_1c47_4549_a3b3_dd2e50612914.slice/crio-fd7541d6a05530d96188c213b11b039f090b44dbfd993ca2991432cc892d7f30 WatchSource:0}: Error finding container fd7541d6a05530d96188c213b11b039f090b44dbfd993ca2991432cc892d7f30: Status 404 returned error can't find the container with id fd7541d6a05530d96188c213b11b039f090b44dbfd993ca2991432cc892d7f30 Oct 01 06:43:21 crc kubenswrapper[4747]: I1001 06:43:21.495153 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstackclient" event={"ID":"bb80c3ef-1c47-4549-a3b3-dd2e50612914","Type":"ContainerStarted","Data":"fd7541d6a05530d96188c213b11b039f090b44dbfd993ca2991432cc892d7f30"} Oct 01 06:43:22 crc kubenswrapper[4747]: I1001 06:43:22.508147 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="glance-kuttl-tests/openstackclient" event={"ID":"bb80c3ef-1c47-4549-a3b3-dd2e50612914","Type":"ContainerStarted","Data":"f0d1c77142e9628fb41d7458c90062235abf643303ac70c9709ed748186b1549"} Oct 01 06:43:22 crc kubenswrapper[4747]: I1001 06:43:22.541898 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="glance-kuttl-tests/openstackclient" podStartSLOduration=2.541869065 podStartE2EDuration="2.541869065s" podCreationTimestamp="2025-10-01 06:43:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-01 06:43:22.530396686 +0000 UTC m=+1603.940053795" watchObservedRunningTime="2025-10-01 06:43:22.541869065 +0000 UTC m=+1603.951526174" Oct 01 06:43:35 crc kubenswrapper[4747]: I1001 06:43:35.761532 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:43:35 crc kubenswrapper[4747]: I1001 06:43:35.762225 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:43:40 crc kubenswrapper[4747]: I1001 06:43:40.862068 4747 scope.go:117] "RemoveContainer" containerID="0da78a383ff6673aaa0eee0924b9312236842212a1b616112c38a52115ee1d7b" Oct 01 06:44:05 crc kubenswrapper[4747]: I1001 06:44:05.760952 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= 
Oct 01 06:44:05 crc kubenswrapper[4747]: I1001 06:44:05.761455 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:44:35 crc kubenswrapper[4747]: I1001 06:44:35.760878 4747 patch_prober.go:28] interesting pod/machine-config-daemon-gh9dg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 01 06:44:35 crc kubenswrapper[4747]: I1001 06:44:35.761541 4747 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 01 06:44:35 crc kubenswrapper[4747]: I1001 06:44:35.761604 4747 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" Oct 01 06:44:35 crc kubenswrapper[4747]: I1001 06:44:35.762805 4747 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03"} pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 01 06:44:35 crc kubenswrapper[4747]: I1001 06:44:35.762911 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerName="machine-config-daemon" containerID="cri-o://8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" gracePeriod=600 Oct 01 06:44:36 crc kubenswrapper[4747]: I1001 06:44:36.165990 4747 generic.go:334] "Generic (PLEG): container finished" podID="90df9e29-7482-4ab7-84c6-f3029df17a0d" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" exitCode=0 Oct 01 06:44:36 crc kubenswrapper[4747]: I1001 06:44:36.166247 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" event={"ID":"90df9e29-7482-4ab7-84c6-f3029df17a0d","Type":"ContainerDied","Data":"8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03"} Oct 01 06:44:36 crc kubenswrapper[4747]: I1001 06:44:36.166320 4747 scope.go:117] "RemoveContainer" containerID="401ec113c485c56e663feb5d9e61759b53e4a33a866f086f08fbca2a246a142f" Oct 01 06:44:36 crc kubenswrapper[4747]: E1001 06:44:36.415025 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:44:37 crc kubenswrapper[4747]: I1001 06:44:37.182624 4747 scope.go:117] "RemoveContainer" 
containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:44:37 crc kubenswrapper[4747]: E1001 06:44:37.183093 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:44:50 crc kubenswrapper[4747]: I1001 06:44:50.276924 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:44:50 crc kubenswrapper[4747]: E1001 06:44:50.277547 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:44:56 crc kubenswrapper[4747]: I1001 06:44:56.501677 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-bjw8g/must-gather-n94lr"] Oct 01 06:44:56 crc kubenswrapper[4747]: I1001 06:44:56.503541 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bjw8g/must-gather-n94lr" Oct 01 06:44:56 crc kubenswrapper[4747]: I1001 06:44:56.505352 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-bjw8g"/"openshift-service-ca.crt" Oct 01 06:44:56 crc kubenswrapper[4747]: I1001 06:44:56.505353 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-bjw8g"/"default-dockercfg-5hqt5" Oct 01 06:44:56 crc kubenswrapper[4747]: I1001 06:44:56.506199 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-bjw8g"/"kube-root-ca.crt" Oct 01 06:44:56 crc kubenswrapper[4747]: I1001 06:44:56.568044 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-bjw8g/must-gather-n94lr"] Oct 01 06:44:56 crc kubenswrapper[4747]: I1001 06:44:56.586848 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jv2f7\" (UniqueName: \"kubernetes.io/projected/1370403d-e53d-48fa-861f-0957fefb7adf-kube-api-access-jv2f7\") pod \"must-gather-n94lr\" (UID: \"1370403d-e53d-48fa-861f-0957fefb7adf\") " pod="openshift-must-gather-bjw8g/must-gather-n94lr" Oct 01 06:44:56 crc kubenswrapper[4747]: I1001 06:44:56.586963 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1370403d-e53d-48fa-861f-0957fefb7adf-must-gather-output\") pod \"must-gather-n94lr\" (UID: \"1370403d-e53d-48fa-861f-0957fefb7adf\") " pod="openshift-must-gather-bjw8g/must-gather-n94lr" Oct 01 06:44:56 crc kubenswrapper[4747]: I1001 06:44:56.688138 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jv2f7\" (UniqueName: \"kubernetes.io/projected/1370403d-e53d-48fa-861f-0957fefb7adf-kube-api-access-jv2f7\") pod \"must-gather-n94lr\" (UID: \"1370403d-e53d-48fa-861f-0957fefb7adf\") " 
pod="openshift-must-gather-bjw8g/must-gather-n94lr" Oct 01 06:44:56 crc kubenswrapper[4747]: I1001 06:44:56.688316 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1370403d-e53d-48fa-861f-0957fefb7adf-must-gather-output\") pod \"must-gather-n94lr\" (UID: \"1370403d-e53d-48fa-861f-0957fefb7adf\") " pod="openshift-must-gather-bjw8g/must-gather-n94lr" Oct 01 06:44:56 crc kubenswrapper[4747]: I1001 06:44:56.688897 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1370403d-e53d-48fa-861f-0957fefb7adf-must-gather-output\") pod \"must-gather-n94lr\" (UID: \"1370403d-e53d-48fa-861f-0957fefb7adf\") " pod="openshift-must-gather-bjw8g/must-gather-n94lr" Oct 01 06:44:56 crc kubenswrapper[4747]: I1001 06:44:56.710445 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jv2f7\" (UniqueName: \"kubernetes.io/projected/1370403d-e53d-48fa-861f-0957fefb7adf-kube-api-access-jv2f7\") pod \"must-gather-n94lr\" (UID: \"1370403d-e53d-48fa-861f-0957fefb7adf\") " pod="openshift-must-gather-bjw8g/must-gather-n94lr" Oct 01 06:44:56 crc kubenswrapper[4747]: I1001 06:44:56.823993 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bjw8g/must-gather-n94lr" Oct 01 06:44:57 crc kubenswrapper[4747]: I1001 06:44:57.270573 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-bjw8g/must-gather-n94lr"] Oct 01 06:44:57 crc kubenswrapper[4747]: I1001 06:44:57.381654 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bjw8g/must-gather-n94lr" event={"ID":"1370403d-e53d-48fa-861f-0957fefb7adf","Type":"ContainerStarted","Data":"b9777848da1626a28de9c5c9440a024518de5590209edcd311195a007679b3ac"} Oct 01 06:45:00 crc kubenswrapper[4747]: I1001 06:45:00.156243 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321685-jvl7x"] Oct 01 06:45:00 crc kubenswrapper[4747]: I1001 06:45:00.157352 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-jvl7x" Oct 01 06:45:00 crc kubenswrapper[4747]: I1001 06:45:00.160697 4747 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 01 06:45:00 crc kubenswrapper[4747]: I1001 06:45:00.165936 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321685-jvl7x"] Oct 01 06:45:00 crc kubenswrapper[4747]: I1001 06:45:00.170545 4747 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 01 06:45:00 crc kubenswrapper[4747]: I1001 06:45:00.240760 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d1e9a7df-bf0b-4d74-8e30-3d4154782cfc-config-volume\") pod \"collect-profiles-29321685-jvl7x\" (UID: \"d1e9a7df-bf0b-4d74-8e30-3d4154782cfc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-jvl7x" Oct 01 06:45:00 crc kubenswrapper[4747]: I1001 06:45:00.240866 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d1e9a7df-bf0b-4d74-8e30-3d4154782cfc-secret-volume\") pod \"collect-profiles-29321685-jvl7x\" (UID: \"d1e9a7df-bf0b-4d74-8e30-3d4154782cfc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-jvl7x" Oct 01 06:45:00 crc kubenswrapper[4747]: I1001 06:45:00.240930 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vxr5\" (UniqueName: \"kubernetes.io/projected/d1e9a7df-bf0b-4d74-8e30-3d4154782cfc-kube-api-access-2vxr5\") pod \"collect-profiles-29321685-jvl7x\" (UID: \"d1e9a7df-bf0b-4d74-8e30-3d4154782cfc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-jvl7x" Oct 01 06:45:00 crc kubenswrapper[4747]: I1001 06:45:00.342164 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d1e9a7df-bf0b-4d74-8e30-3d4154782cfc-secret-volume\") pod \"collect-profiles-29321685-jvl7x\" (UID: \"d1e9a7df-bf0b-4d74-8e30-3d4154782cfc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-jvl7x" Oct 01 06:45:00 crc kubenswrapper[4747]: I1001 06:45:00.342293 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vxr5\" (UniqueName: \"kubernetes.io/projected/d1e9a7df-bf0b-4d74-8e30-3d4154782cfc-kube-api-access-2vxr5\") pod \"collect-profiles-29321685-jvl7x\" (UID: \"d1e9a7df-bf0b-4d74-8e30-3d4154782cfc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-jvl7x" Oct 01 06:45:00 crc kubenswrapper[4747]: I1001 06:45:00.343655 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d1e9a7df-bf0b-4d74-8e30-3d4154782cfc-config-volume\") pod \"collect-profiles-29321685-jvl7x\" (UID: \"d1e9a7df-bf0b-4d74-8e30-3d4154782cfc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-jvl7x" Oct 01 06:45:00 crc kubenswrapper[4747]: I1001 06:45:00.344715 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d1e9a7df-bf0b-4d74-8e30-3d4154782cfc-config-volume\") pod 
\"collect-profiles-29321685-jvl7x\" (UID: \"d1e9a7df-bf0b-4d74-8e30-3d4154782cfc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-jvl7x" Oct 01 06:45:00 crc kubenswrapper[4747]: I1001 06:45:00.359710 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d1e9a7df-bf0b-4d74-8e30-3d4154782cfc-secret-volume\") pod \"collect-profiles-29321685-jvl7x\" (UID: \"d1e9a7df-bf0b-4d74-8e30-3d4154782cfc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-jvl7x" Oct 01 06:45:00 crc kubenswrapper[4747]: I1001 06:45:00.360278 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vxr5\" (UniqueName: \"kubernetes.io/projected/d1e9a7df-bf0b-4d74-8e30-3d4154782cfc-kube-api-access-2vxr5\") pod \"collect-profiles-29321685-jvl7x\" (UID: \"d1e9a7df-bf0b-4d74-8e30-3d4154782cfc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-jvl7x" Oct 01 06:45:00 crc kubenswrapper[4747]: I1001 06:45:00.481709 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-jvl7x" Oct 01 06:45:01 crc kubenswrapper[4747]: I1001 06:45:01.906508 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29321685-jvl7x"] Oct 01 06:45:02 crc kubenswrapper[4747]: I1001 06:45:02.435491 4747 generic.go:334] "Generic (PLEG): container finished" podID="d1e9a7df-bf0b-4d74-8e30-3d4154782cfc" containerID="17c2e7dc65d482ba33f487957dd6598acb782f84fb544f96e19965ab1898d9dc" exitCode=0 Oct 01 06:45:02 crc kubenswrapper[4747]: I1001 06:45:02.435599 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-jvl7x" event={"ID":"d1e9a7df-bf0b-4d74-8e30-3d4154782cfc","Type":"ContainerDied","Data":"17c2e7dc65d482ba33f487957dd6598acb782f84fb544f96e19965ab1898d9dc"} Oct 01 06:45:02 crc kubenswrapper[4747]: I1001 06:45:02.435949 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-jvl7x" event={"ID":"d1e9a7df-bf0b-4d74-8e30-3d4154782cfc","Type":"ContainerStarted","Data":"4b4dc63ac48323271975f1e4fcea481db792a04e563c89fdc03ce0c82459575a"} Oct 01 06:45:02 crc kubenswrapper[4747]: I1001 06:45:02.438435 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bjw8g/must-gather-n94lr" event={"ID":"1370403d-e53d-48fa-861f-0957fefb7adf","Type":"ContainerStarted","Data":"4568053a18a7d94b3f5319c41c1b1cecdf54ed6597f041db6339f8443027b98e"} Oct 01 06:45:02 crc kubenswrapper[4747]: I1001 06:45:02.438503 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bjw8g/must-gather-n94lr" event={"ID":"1370403d-e53d-48fa-861f-0957fefb7adf","Type":"ContainerStarted","Data":"760421b16f59a9008fd14a296d399c8e8ca870e45cb2fac3d438f47059650e33"} Oct 01 06:45:02 crc kubenswrapper[4747]: I1001 06:45:02.475989 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-bjw8g/must-gather-n94lr" podStartSLOduration=2.338582932 podStartE2EDuration="6.475966821s" podCreationTimestamp="2025-10-01 06:44:56 +0000 UTC" firstStartedPulling="2025-10-01 06:44:57.282216332 +0000 UTC m=+1698.691873381" lastFinishedPulling="2025-10-01 06:45:01.419600221 +0000 UTC m=+1702.829257270" observedRunningTime="2025-10-01 06:45:02.475006238 +0000 UTC m=+1703.884663287" 
watchObservedRunningTime="2025-10-01 06:45:02.475966821 +0000 UTC m=+1703.885623880" Oct 01 06:45:03 crc kubenswrapper[4747]: I1001 06:45:03.276885 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:45:03 crc kubenswrapper[4747]: E1001 06:45:03.277169 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:45:03 crc kubenswrapper[4747]: I1001 06:45:03.709897 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-jvl7x" Oct 01 06:45:03 crc kubenswrapper[4747]: I1001 06:45:03.799847 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d1e9a7df-bf0b-4d74-8e30-3d4154782cfc-secret-volume\") pod \"d1e9a7df-bf0b-4d74-8e30-3d4154782cfc\" (UID: \"d1e9a7df-bf0b-4d74-8e30-3d4154782cfc\") " Oct 01 06:45:03 crc kubenswrapper[4747]: I1001 06:45:03.799907 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vxr5\" (UniqueName: \"kubernetes.io/projected/d1e9a7df-bf0b-4d74-8e30-3d4154782cfc-kube-api-access-2vxr5\") pod \"d1e9a7df-bf0b-4d74-8e30-3d4154782cfc\" (UID: \"d1e9a7df-bf0b-4d74-8e30-3d4154782cfc\") " Oct 01 06:45:03 crc kubenswrapper[4747]: I1001 06:45:03.799985 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d1e9a7df-bf0b-4d74-8e30-3d4154782cfc-config-volume\") pod \"d1e9a7df-bf0b-4d74-8e30-3d4154782cfc\" (UID: \"d1e9a7df-bf0b-4d74-8e30-3d4154782cfc\") " Oct 01 06:45:03 crc kubenswrapper[4747]: I1001 06:45:03.800927 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1e9a7df-bf0b-4d74-8e30-3d4154782cfc-config-volume" (OuterVolumeSpecName: "config-volume") pod "d1e9a7df-bf0b-4d74-8e30-3d4154782cfc" (UID: "d1e9a7df-bf0b-4d74-8e30-3d4154782cfc"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 01 06:45:03 crc kubenswrapper[4747]: I1001 06:45:03.805204 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1e9a7df-bf0b-4d74-8e30-3d4154782cfc-kube-api-access-2vxr5" (OuterVolumeSpecName: "kube-api-access-2vxr5") pod "d1e9a7df-bf0b-4d74-8e30-3d4154782cfc" (UID: "d1e9a7df-bf0b-4d74-8e30-3d4154782cfc"). InnerVolumeSpecName "kube-api-access-2vxr5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:45:03 crc kubenswrapper[4747]: I1001 06:45:03.805593 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1e9a7df-bf0b-4d74-8e30-3d4154782cfc-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d1e9a7df-bf0b-4d74-8e30-3d4154782cfc" (UID: "d1e9a7df-bf0b-4d74-8e30-3d4154782cfc"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 01 06:45:03 crc kubenswrapper[4747]: I1001 06:45:03.901553 4747 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d1e9a7df-bf0b-4d74-8e30-3d4154782cfc-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 01 06:45:03 crc kubenswrapper[4747]: I1001 06:45:03.901588 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vxr5\" (UniqueName: \"kubernetes.io/projected/d1e9a7df-bf0b-4d74-8e30-3d4154782cfc-kube-api-access-2vxr5\") on node \"crc\" DevicePath \"\"" Oct 01 06:45:03 crc kubenswrapper[4747]: I1001 06:45:03.901597 4747 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d1e9a7df-bf0b-4d74-8e30-3d4154782cfc-config-volume\") on node \"crc\" DevicePath \"\"" Oct 01 06:45:04 crc kubenswrapper[4747]: I1001 06:45:04.452924 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-jvl7x" event={"ID":"d1e9a7df-bf0b-4d74-8e30-3d4154782cfc","Type":"ContainerDied","Data":"4b4dc63ac48323271975f1e4fcea481db792a04e563c89fdc03ce0c82459575a"} Oct 01 06:45:04 crc kubenswrapper[4747]: I1001 06:45:04.452970 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b4dc63ac48323271975f1e4fcea481db792a04e563c89fdc03ce0c82459575a" Oct 01 06:45:04 crc kubenswrapper[4747]: I1001 06:45:04.453024 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29321685-jvl7x" Oct 01 06:45:14 crc kubenswrapper[4747]: I1001 06:45:14.276541 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:45:14 crc kubenswrapper[4747]: E1001 06:45:14.277307 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:45:25 crc kubenswrapper[4747]: I1001 06:45:25.278931 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:45:25 crc kubenswrapper[4747]: E1001 06:45:25.279637 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:45:38 crc kubenswrapper[4747]: I1001 06:45:38.042681 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl_2cd118b3-1b7f-497d-913e-2938d710bce4/util/0.log" Oct 01 06:45:38 crc kubenswrapper[4747]: I1001 06:45:38.165668 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl_2cd118b3-1b7f-497d-913e-2938d710bce4/util/0.log" Oct 01 06:45:38 crc kubenswrapper[4747]: I1001 
06:45:38.168613 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl_2cd118b3-1b7f-497d-913e-2938d710bce4/pull/0.log" Oct 01 06:45:38 crc kubenswrapper[4747]: I1001 06:45:38.194104 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl_2cd118b3-1b7f-497d-913e-2938d710bce4/pull/0.log" Oct 01 06:45:38 crc kubenswrapper[4747]: I1001 06:45:38.276298 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:45:38 crc kubenswrapper[4747]: E1001 06:45:38.276601 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:45:38 crc kubenswrapper[4747]: I1001 06:45:38.339457 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl_2cd118b3-1b7f-497d-913e-2938d710bce4/util/0.log" Oct 01 06:45:38 crc kubenswrapper[4747]: I1001 06:45:38.348077 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl_2cd118b3-1b7f-497d-913e-2938d710bce4/pull/0.log" Oct 01 06:45:38 crc kubenswrapper[4747]: I1001 06:45:38.364653 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_27bc8640bf9574cb4ef345ae08eb5466dd1c33b998127973c6eb353a16ln9pl_2cd118b3-1b7f-497d-913e-2938d710bce4/extract/0.log" Oct 01 06:45:38 crc kubenswrapper[4747]: I1001 06:45:38.536014 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw_a4150451-d4ba-43d4-a834-f97510776094/util/0.log" Oct 01 06:45:38 crc kubenswrapper[4747]: I1001 06:45:38.670451 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw_a4150451-d4ba-43d4-a834-f97510776094/pull/0.log" Oct 01 06:45:38 crc kubenswrapper[4747]: I1001 06:45:38.694740 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw_a4150451-d4ba-43d4-a834-f97510776094/util/0.log" Oct 01 06:45:38 crc kubenswrapper[4747]: I1001 06:45:38.718333 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw_a4150451-d4ba-43d4-a834-f97510776094/pull/0.log" Oct 01 06:45:38 crc kubenswrapper[4747]: I1001 06:45:38.857645 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw_a4150451-d4ba-43d4-a834-f97510776094/util/0.log" Oct 01 06:45:38 crc kubenswrapper[4747]: I1001 06:45:38.880880 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw_a4150451-d4ba-43d4-a834-f97510776094/pull/0.log" Oct 01 06:45:38 crc kubenswrapper[4747]: I1001 06:45:38.937284 
4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2bbe28ed40a2d866c28b219e48f73bb6c06c95a9ad2ad4485030698b6147spw_a4150451-d4ba-43d4-a834-f97510776094/extract/0.log" Oct 01 06:45:39 crc kubenswrapper[4747]: I1001 06:45:39.026091 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj_dcea7572-1f85-4dcc-83b1-4b8dcf554ade/util/0.log" Oct 01 06:45:39 crc kubenswrapper[4747]: I1001 06:45:39.203542 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj_dcea7572-1f85-4dcc-83b1-4b8dcf554ade/util/0.log" Oct 01 06:45:39 crc kubenswrapper[4747]: I1001 06:45:39.224470 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj_dcea7572-1f85-4dcc-83b1-4b8dcf554ade/pull/0.log" Oct 01 06:45:39 crc kubenswrapper[4747]: I1001 06:45:39.226208 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj_dcea7572-1f85-4dcc-83b1-4b8dcf554ade/pull/0.log" Oct 01 06:45:39 crc kubenswrapper[4747]: I1001 06:45:39.420635 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj_dcea7572-1f85-4dcc-83b1-4b8dcf554ade/pull/0.log" Oct 01 06:45:39 crc kubenswrapper[4747]: I1001 06:45:39.423795 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj_dcea7572-1f85-4dcc-83b1-4b8dcf554ade/util/0.log" Oct 01 06:45:39 crc kubenswrapper[4747]: I1001 06:45:39.440212 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6bbb426e104b79adc5cea6e5b6b6aa5158f9f5bfeb3bc1313159a48466kgdpj_dcea7572-1f85-4dcc-83b1-4b8dcf554ade/extract/0.log" Oct 01 06:45:39 crc kubenswrapper[4747]: I1001 06:45:39.589571 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25_f13c90e4-af2a-4615-a726-3f5f36ad445e/util/0.log" Oct 01 06:45:39 crc kubenswrapper[4747]: I1001 06:45:39.728386 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25_f13c90e4-af2a-4615-a726-3f5f36ad445e/pull/0.log" Oct 01 06:45:39 crc kubenswrapper[4747]: I1001 06:45:39.756300 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25_f13c90e4-af2a-4615-a726-3f5f36ad445e/pull/0.log" Oct 01 06:45:39 crc kubenswrapper[4747]: I1001 06:45:39.794197 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25_f13c90e4-af2a-4615-a726-3f5f36ad445e/util/0.log" Oct 01 06:45:39 crc kubenswrapper[4747]: I1001 06:45:39.945775 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25_f13c90e4-af2a-4615-a726-3f5f36ad445e/util/0.log" Oct 01 06:45:39 crc kubenswrapper[4747]: I1001 06:45:39.977710 4747 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25_f13c90e4-af2a-4615-a726-3f5f36ad445e/pull/0.log" Oct 01 06:45:40 crc kubenswrapper[4747]: I1001 06:45:40.009339 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590gvf25_f13c90e4-af2a-4615-a726-3f5f36ad445e/extract/0.log" Oct 01 06:45:40 crc kubenswrapper[4747]: I1001 06:45:40.161153 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r_291e00e8-ae3e-4eaa-8dd7-056c954d4800/util/0.log" Oct 01 06:45:40 crc kubenswrapper[4747]: I1001 06:45:40.267913 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r_291e00e8-ae3e-4eaa-8dd7-056c954d4800/pull/0.log" Oct 01 06:45:40 crc kubenswrapper[4747]: I1001 06:45:40.299662 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r_291e00e8-ae3e-4eaa-8dd7-056c954d4800/util/0.log" Oct 01 06:45:40 crc kubenswrapper[4747]: I1001 06:45:40.331820 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r_291e00e8-ae3e-4eaa-8dd7-056c954d4800/pull/0.log" Oct 01 06:45:40 crc kubenswrapper[4747]: I1001 06:45:40.490400 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r_291e00e8-ae3e-4eaa-8dd7-056c954d4800/util/0.log" Oct 01 06:45:40 crc kubenswrapper[4747]: I1001 06:45:40.524193 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r_291e00e8-ae3e-4eaa-8dd7-056c954d4800/pull/0.log" Oct 01 06:45:40 crc kubenswrapper[4747]: I1001 06:45:40.529644 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_bd1bad0265b413fed69a73530da620b6041562828be055ffecb1aa2303mp77r_291e00e8-ae3e-4eaa-8dd7-056c954d4800/extract/0.log" Oct 01 06:45:40 crc kubenswrapper[4747]: I1001 06:45:40.638415 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v_4a45e1f7-8e3a-4628-95fa-1e0d77b95217/util/0.log" Oct 01 06:45:40 crc kubenswrapper[4747]: I1001 06:45:40.777151 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v_4a45e1f7-8e3a-4628-95fa-1e0d77b95217/util/0.log" Oct 01 06:45:40 crc kubenswrapper[4747]: I1001 06:45:40.813301 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v_4a45e1f7-8e3a-4628-95fa-1e0d77b95217/pull/0.log" Oct 01 06:45:40 crc kubenswrapper[4747]: I1001 06:45:40.820358 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v_4a45e1f7-8e3a-4628-95fa-1e0d77b95217/pull/0.log" Oct 01 06:45:40 crc kubenswrapper[4747]: I1001 06:45:40.958273 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v_4a45e1f7-8e3a-4628-95fa-1e0d77b95217/extract/0.log" Oct 01 
06:45:40 crc kubenswrapper[4747]: I1001 06:45:40.968620 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v_4a45e1f7-8e3a-4628-95fa-1e0d77b95217/pull/0.log" Oct 01 06:45:41 crc kubenswrapper[4747]: I1001 06:45:41.001356 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ccd7ba0e3641b58b769150b773d66c24839b2b4bd2cc3744868f2e60a3fcx8v_4a45e1f7-8e3a-4628-95fa-1e0d77b95217/util/0.log" Oct 01 06:45:41 crc kubenswrapper[4747]: I1001 06:45:41.058815 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk_1023659f-3dcf-4a2c-8f6a-eeda4c6a0828/util/0.log" Oct 01 06:45:41 crc kubenswrapper[4747]: I1001 06:45:41.216162 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk_1023659f-3dcf-4a2c-8f6a-eeda4c6a0828/pull/0.log" Oct 01 06:45:41 crc kubenswrapper[4747]: I1001 06:45:41.218235 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk_1023659f-3dcf-4a2c-8f6a-eeda4c6a0828/pull/0.log" Oct 01 06:45:41 crc kubenswrapper[4747]: I1001 06:45:41.240719 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk_1023659f-3dcf-4a2c-8f6a-eeda4c6a0828/util/0.log" Oct 01 06:45:41 crc kubenswrapper[4747]: I1001 06:45:41.366544 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk_1023659f-3dcf-4a2c-8f6a-eeda4c6a0828/pull/0.log" Oct 01 06:45:41 crc kubenswrapper[4747]: I1001 06:45:41.403813 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk_1023659f-3dcf-4a2c-8f6a-eeda4c6a0828/extract/0.log" Oct 01 06:45:41 crc kubenswrapper[4747]: I1001 06:45:41.417376 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-595f9d7bb-btldp_32800c0d-bcf4-4f5d-b8db-598f4450ce31/kube-rbac-proxy/0.log" Oct 01 06:45:41 crc kubenswrapper[4747]: I1001 06:45:41.418910 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_e7e08b4ef4c4210da6849e65893e25f02a2f1e5ad24c8e4d88ab10670f7lsbk_1023659f-3dcf-4a2c-8f6a-eeda4c6a0828/util/0.log" Oct 01 06:45:41 crc kubenswrapper[4747]: I1001 06:45:41.561267 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-595f9d7bb-btldp_32800c0d-bcf4-4f5d-b8db-598f4450ce31/manager/0.log" Oct 01 06:45:41 crc kubenswrapper[4747]: I1001 06:45:41.604779 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6d8f97bbbb-pmdmc_cc97678f-a010-49c0-bc9c-a46288467178/kube-rbac-proxy/0.log" Oct 01 06:45:41 crc kubenswrapper[4747]: I1001 06:45:41.652681 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-index-lg8t8_5c4d75c0-6aea-4ef6-aba7-8bc84df2ba8f/registry-server/0.log" Oct 01 06:45:41 crc kubenswrapper[4747]: I1001 06:45:41.716168 4747 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6d8f97bbbb-pmdmc_cc97678f-a010-49c0-bc9c-a46288467178/manager/0.log" Oct 01 06:45:41 crc kubenswrapper[4747]: I1001 06:45:41.771283 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-index-llv5t_06748093-f73a-4ea5-a452-3b18dc0a9581/registry-server/0.log" Oct 01 06:45:41 crc kubenswrapper[4747]: I1001 06:45:41.827314 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-68d7898b58-wqz8p_c21a5c1e-b158-4adf-bc18-818df3862825/kube-rbac-proxy/0.log" Oct 01 06:45:41 crc kubenswrapper[4747]: I1001 06:45:41.907855 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-68d7898b58-wqz8p_c21a5c1e-b158-4adf-bc18-818df3862825/manager/0.log" Oct 01 06:45:41 crc kubenswrapper[4747]: I1001 06:45:41.963346 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-index-ftl54_8482a7f4-08c7-4bc5-bef0-b44cd14ca523/registry-server/0.log" Oct 01 06:45:42 crc kubenswrapper[4747]: I1001 06:45:42.020985 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-56d5bb7f9d-8z2pb_23011ab7-244d-4f51-831e-6a2817fb36d2/kube-rbac-proxy/0.log" Oct 01 06:45:42 crc kubenswrapper[4747]: I1001 06:45:42.171140 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-index-nrlsf_67b6bc26-65f0-403d-be25-31ff044af9a1/registry-server/0.log" Oct 01 06:45:42 crc kubenswrapper[4747]: I1001 06:45:42.185797 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-56d5bb7f9d-8z2pb_23011ab7-244d-4f51-831e-6a2817fb36d2/manager/0.log" Oct 01 06:45:42 crc kubenswrapper[4747]: I1001 06:45:42.240969 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-74c5fcf84b-mgdfk_3a7cc65a-645c-4533-b334-5f003e1d8382/kube-rbac-proxy/0.log" Oct 01 06:45:42 crc kubenswrapper[4747]: I1001 06:45:42.469274 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-index-qhm8b_49ee0c08-0ca2-4d99-9a74-bb059025f4bc/registry-server/0.log" Oct 01 06:45:42 crc kubenswrapper[4747]: I1001 06:45:42.475010 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-74c5fcf84b-mgdfk_3a7cc65a-645c-4533-b334-5f003e1d8382/manager/0.log" Oct 01 06:45:42 crc kubenswrapper[4747]: I1001 06:45:42.536250 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-779fc9694b-78kw7_c2a1092f-de97-4fa0-9922-3cbcb000f041/operator/0.log" Oct 01 06:45:42 crc kubenswrapper[4747]: I1001 06:45:42.681714 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-index-czqvp_ce3e3ea5-74d4-4cfb-b5d3-2dd3861d9c86/registry-server/0.log" Oct 01 06:45:42 crc kubenswrapper[4747]: I1001 06:45:42.686115 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-64d9946955-5jftj_6179950f-9f69-4c73-8400-b9651eabe647/kube-rbac-proxy/0.log" Oct 01 06:45:42 crc kubenswrapper[4747]: I1001 06:45:42.705919 4747 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_swift-operator-controller-manager-64d9946955-5jftj_6179950f-9f69-4c73-8400-b9651eabe647/manager/0.log" Oct 01 06:45:42 crc kubenswrapper[4747]: I1001 06:45:42.833203 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-index-mk8pd_344442ff-e0f2-437f-806c-62c39d7d5aaf/registry-server/0.log" Oct 01 06:45:53 crc kubenswrapper[4747]: I1001 06:45:53.280811 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:45:53 crc kubenswrapper[4747]: E1001 06:45:53.281462 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:45:58 crc kubenswrapper[4747]: I1001 06:45:58.450905 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-w2ctv_5378035b-02a7-42d4-9c55-91de32b377c0/control-plane-machine-set-operator/0.log" Oct 01 06:45:58 crc kubenswrapper[4747]: I1001 06:45:58.626305 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-vh9mz_b491ab08-e1e7-4166-b2fc-3d265a06414f/kube-rbac-proxy/0.log" Oct 01 06:45:58 crc kubenswrapper[4747]: I1001 06:45:58.632441 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-vh9mz_b491ab08-e1e7-4166-b2fc-3d265a06414f/machine-api-operator/0.log" Oct 01 06:46:08 crc kubenswrapper[4747]: I1001 06:46:08.276348 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:46:08 crc kubenswrapper[4747]: E1001 06:46:08.277848 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:46:15 crc kubenswrapper[4747]: I1001 06:46:15.561222 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-v6qhr_ad53abf4-c21f-4dcc-9761-aed314fca36c/kube-rbac-proxy/0.log" Oct 01 06:46:15 crc kubenswrapper[4747]: I1001 06:46:15.574555 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-5d688f5ffc-v6qhr_ad53abf4-c21f-4dcc-9761-aed314fca36c/controller/0.log" Oct 01 06:46:15 crc kubenswrapper[4747]: I1001 06:46:15.698038 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9rgjv_9bda04f7-665b-4bd0-9884-999b80fcb561/cp-frr-files/0.log" Oct 01 06:46:15 crc kubenswrapper[4747]: I1001 06:46:15.890945 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9rgjv_9bda04f7-665b-4bd0-9884-999b80fcb561/cp-reloader/0.log" Oct 01 06:46:15 crc kubenswrapper[4747]: I1001 06:46:15.932081 4747 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-9rgjv_9bda04f7-665b-4bd0-9884-999b80fcb561/cp-reloader/0.log" Oct 01 06:46:15 crc kubenswrapper[4747]: I1001 06:46:15.939087 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9rgjv_9bda04f7-665b-4bd0-9884-999b80fcb561/cp-metrics/0.log" Oct 01 06:46:15 crc kubenswrapper[4747]: I1001 06:46:15.945665 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9rgjv_9bda04f7-665b-4bd0-9884-999b80fcb561/cp-frr-files/0.log" Oct 01 06:46:16 crc kubenswrapper[4747]: I1001 06:46:16.087777 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9rgjv_9bda04f7-665b-4bd0-9884-999b80fcb561/cp-frr-files/0.log" Oct 01 06:46:16 crc kubenswrapper[4747]: I1001 06:46:16.147560 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9rgjv_9bda04f7-665b-4bd0-9884-999b80fcb561/cp-metrics/0.log" Oct 01 06:46:16 crc kubenswrapper[4747]: I1001 06:46:16.153037 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9rgjv_9bda04f7-665b-4bd0-9884-999b80fcb561/cp-metrics/0.log" Oct 01 06:46:16 crc kubenswrapper[4747]: I1001 06:46:16.158376 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9rgjv_9bda04f7-665b-4bd0-9884-999b80fcb561/cp-reloader/0.log" Oct 01 06:46:16 crc kubenswrapper[4747]: I1001 06:46:16.293722 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9rgjv_9bda04f7-665b-4bd0-9884-999b80fcb561/cp-metrics/0.log" Oct 01 06:46:16 crc kubenswrapper[4747]: I1001 06:46:16.322064 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9rgjv_9bda04f7-665b-4bd0-9884-999b80fcb561/cp-frr-files/0.log" Oct 01 06:46:16 crc kubenswrapper[4747]: I1001 06:46:16.339945 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9rgjv_9bda04f7-665b-4bd0-9884-999b80fcb561/cp-reloader/0.log" Oct 01 06:46:16 crc kubenswrapper[4747]: I1001 06:46:16.342730 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9rgjv_9bda04f7-665b-4bd0-9884-999b80fcb561/controller/0.log" Oct 01 06:46:16 crc kubenswrapper[4747]: I1001 06:46:16.495890 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9rgjv_9bda04f7-665b-4bd0-9884-999b80fcb561/frr-metrics/0.log" Oct 01 06:46:16 crc kubenswrapper[4747]: I1001 06:46:16.543921 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9rgjv_9bda04f7-665b-4bd0-9884-999b80fcb561/kube-rbac-proxy-frr/0.log" Oct 01 06:46:16 crc kubenswrapper[4747]: I1001 06:46:16.547319 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9rgjv_9bda04f7-665b-4bd0-9884-999b80fcb561/kube-rbac-proxy/0.log" Oct 01 06:46:16 crc kubenswrapper[4747]: I1001 06:46:16.687923 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9rgjv_9bda04f7-665b-4bd0-9884-999b80fcb561/reloader/0.log" Oct 01 06:46:16 crc kubenswrapper[4747]: I1001 06:46:16.785997 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-5478bdb765-mm45p_0b60e795-ff1b-4dfc-b3cf-3b28b92ac293/frr-k8s-webhook-server/0.log" Oct 01 06:46:16 crc kubenswrapper[4747]: I1001 06:46:16.897005 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-9rgjv_9bda04f7-665b-4bd0-9884-999b80fcb561/frr/0.log" Oct 01 06:46:16 crc 
kubenswrapper[4747]: I1001 06:46:16.991602 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-8fcb75648-xwqnj_f14a8d6e-977f-4144-8a2a-d2d534a6f89b/manager/0.log" Oct 01 06:46:17 crc kubenswrapper[4747]: I1001 06:46:17.073670 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-556dcd774d-2bh5z_b0bffa6f-ff4b-43d8-82f5-1be2e812e0a0/webhook-server/0.log" Oct 01 06:46:17 crc kubenswrapper[4747]: I1001 06:46:17.124473 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-zw58l_25e93d69-ecec-4c53-81e5-18cd341f14f3/kube-rbac-proxy/0.log" Oct 01 06:46:17 crc kubenswrapper[4747]: I1001 06:46:17.243982 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-zw58l_25e93d69-ecec-4c53-81e5-18cd341f14f3/speaker/0.log" Oct 01 06:46:22 crc kubenswrapper[4747]: I1001 06:46:22.277239 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:46:22 crc kubenswrapper[4747]: E1001 06:46:22.278006 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:46:31 crc kubenswrapper[4747]: I1001 06:46:31.659346 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-b4f6-account-create-zkdj4_fcde1a2d-d437-45e9-ab7a-37ce46038e1c/mariadb-account-create/0.log" Oct 01 06:46:31 crc kubenswrapper[4747]: I1001 06:46:31.795756 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-db-create-8gmd8_ae14f798-2ba0-4b69-9e18-e55a48b38b71/mariadb-database-create/0.log" Oct 01 06:46:31 crc kubenswrapper[4747]: I1001 06:46:31.833025 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-db-sync-rrxt7_2c39714a-ba3e-4c86-88d6-0a60bc88d227/glance-db-sync/0.log" Oct 01 06:46:31 crc kubenswrapper[4747]: I1001 06:46:31.949857 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-default-external-api-0_5580ae54-87c6-4762-b717-dacdd7195382/glance-api/0.log" Oct 01 06:46:32 crc kubenswrapper[4747]: I1001 06:46:32.028102 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-default-external-api-0_5580ae54-87c6-4762-b717-dacdd7195382/glance-log/0.log" Oct 01 06:46:32 crc kubenswrapper[4747]: I1001 06:46:32.035911 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-default-external-api-0_5580ae54-87c6-4762-b717-dacdd7195382/glance-httpd/0.log" Oct 01 06:46:32 crc kubenswrapper[4747]: I1001 06:46:32.135279 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-default-internal-api-0_ce3a717a-3933-4866-8d52-63c2b160db8f/glance-api/0.log" Oct 01 06:46:32 crc kubenswrapper[4747]: I1001 06:46:32.204325 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_glance-default-internal-api-0_ce3a717a-3933-4866-8d52-63c2b160db8f/glance-httpd/0.log" Oct 01 06:46:32 crc kubenswrapper[4747]: I1001 06:46:32.223215 4747 log.go:25] "Finished parsing log file" 
path="/var/log/pods/glance-kuttl-tests_glance-default-internal-api-0_ce3a717a-3933-4866-8d52-63c2b160db8f/glance-log/0.log" Oct 01 06:46:32 crc kubenswrapper[4747]: I1001 06:46:32.507260 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_memcached-0_cca7231f-4324-4f02-8d14-f08c4b7382e3/memcached/0.log" Oct 01 06:46:32 crc kubenswrapper[4747]: I1001 06:46:32.592782 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_keystone-c7949ccdb-cvdvf_a23fe2de-1da0-4e06-8b11-90c618eb2a15/keystone-api/0.log" Oct 01 06:46:32 crc kubenswrapper[4747]: I1001 06:46:32.635519 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-0_97ce40d4-b67f-474b-8e9f-9657e253305d/mysql-bootstrap/0.log" Oct 01 06:46:32 crc kubenswrapper[4747]: I1001 06:46:32.791543 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-0_97ce40d4-b67f-474b-8e9f-9657e253305d/galera/0.log" Oct 01 06:46:32 crc kubenswrapper[4747]: I1001 06:46:32.850133 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-0_97ce40d4-b67f-474b-8e9f-9657e253305d/mysql-bootstrap/0.log" Oct 01 06:46:32 crc kubenswrapper[4747]: I1001 06:46:32.861338 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-1_81b25fd7-ceb6-4b9f-9398-ac38129304a0/mysql-bootstrap/0.log" Oct 01 06:46:32 crc kubenswrapper[4747]: I1001 06:46:32.966541 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-1_81b25fd7-ceb6-4b9f-9398-ac38129304a0/mysql-bootstrap/0.log" Oct 01 06:46:33 crc kubenswrapper[4747]: I1001 06:46:33.022536 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-2_7fe827ea-ce04-449a-8a2c-5a99a3d76343/mysql-bootstrap/0.log" Oct 01 06:46:33 crc kubenswrapper[4747]: I1001 06:46:33.034917 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-1_81b25fd7-ceb6-4b9f-9398-ac38129304a0/galera/0.log" Oct 01 06:46:33 crc kubenswrapper[4747]: I1001 06:46:33.246545 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-2_7fe827ea-ce04-449a-8a2c-5a99a3d76343/mysql-bootstrap/0.log" Oct 01 06:46:33 crc kubenswrapper[4747]: I1001 06:46:33.255254 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstackclient_bb80c3ef-1c47-4549-a3b3-dd2e50612914/openstackclient/0.log" Oct 01 06:46:33 crc kubenswrapper[4747]: I1001 06:46:33.273608 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_openstack-galera-2_7fe827ea-ce04-449a-8a2c-5a99a3d76343/galera/0.log" Oct 01 06:46:33 crc kubenswrapper[4747]: I1001 06:46:33.277117 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:46:33 crc kubenswrapper[4747]: E1001 06:46:33.277361 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:46:33 crc kubenswrapper[4747]: I1001 06:46:33.456027 4747 log.go:25] "Finished parsing 
log file" path="/var/log/pods/glance-kuttl-tests_rabbitmq-server-0_265c0df4-e327-42c7-bd89-f88ad59209ec/setup-container/0.log" Oct 01 06:46:33 crc kubenswrapper[4747]: I1001 06:46:33.601914 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_rabbitmq-server-0_265c0df4-e327-42c7-bd89-f88ad59209ec/setup-container/0.log" Oct 01 06:46:33 crc kubenswrapper[4747]: I1001 06:46:33.611038 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_rabbitmq-server-0_265c0df4-e327-42c7-bd89-f88ad59209ec/rabbitmq/0.log" Oct 01 06:46:33 crc kubenswrapper[4747]: I1001 06:46:33.688993 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-proxy-7578798499-5kbdv_a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c/proxy-httpd/0.log" Oct 01 06:46:33 crc kubenswrapper[4747]: I1001 06:46:33.770144 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-proxy-7578798499-5kbdv_a93d35d8-feb9-463b-bfd5-c1aa6ebfcf9c/proxy-server/0.log" Oct 01 06:46:33 crc kubenswrapper[4747]: I1001 06:46:33.838473 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-ring-rebalance-qtqmv_928ccacf-68c1-4861-9804-320bcdc66f93/swift-ring-rebalance/0.log" Oct 01 06:46:33 crc kubenswrapper[4747]: I1001 06:46:33.889716 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hgxqj"] Oct 01 06:46:33 crc kubenswrapper[4747]: E1001 06:46:33.890052 4747 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1e9a7df-bf0b-4d74-8e30-3d4154782cfc" containerName="collect-profiles" Oct 01 06:46:33 crc kubenswrapper[4747]: I1001 06:46:33.890075 4747 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1e9a7df-bf0b-4d74-8e30-3d4154782cfc" containerName="collect-profiles" Oct 01 06:46:33 crc kubenswrapper[4747]: I1001 06:46:33.890307 4747 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1e9a7df-bf0b-4d74-8e30-3d4154782cfc" containerName="collect-profiles" Oct 01 06:46:33 crc kubenswrapper[4747]: I1001 06:46:33.891376 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hgxqj" Oct 01 06:46:33 crc kubenswrapper[4747]: I1001 06:46:33.901217 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hgxqj"] Oct 01 06:46:33 crc kubenswrapper[4747]: I1001 06:46:33.976941 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_55fda102-0ab8-430c-b4b9-2ca87772c44d/account-auditor/0.log" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.002037 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fa102e4-1605-4f1d-8d24-ee06801b9142-catalog-content\") pod \"redhat-operators-hgxqj\" (UID: \"7fa102e4-1605-4f1d-8d24-ee06801b9142\") " pod="openshift-marketplace/redhat-operators-hgxqj" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.002092 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fa102e4-1605-4f1d-8d24-ee06801b9142-utilities\") pod \"redhat-operators-hgxqj\" (UID: \"7fa102e4-1605-4f1d-8d24-ee06801b9142\") " pod="openshift-marketplace/redhat-operators-hgxqj" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.002131 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7qbl\" (UniqueName: \"kubernetes.io/projected/7fa102e4-1605-4f1d-8d24-ee06801b9142-kube-api-access-m7qbl\") pod \"redhat-operators-hgxqj\" (UID: \"7fa102e4-1605-4f1d-8d24-ee06801b9142\") " pod="openshift-marketplace/redhat-operators-hgxqj" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.011333 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_55fda102-0ab8-430c-b4b9-2ca87772c44d/account-reaper/0.log" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.098686 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_55fda102-0ab8-430c-b4b9-2ca87772c44d/account-server/0.log" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.103983 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fa102e4-1605-4f1d-8d24-ee06801b9142-utilities\") pod \"redhat-operators-hgxqj\" (UID: \"7fa102e4-1605-4f1d-8d24-ee06801b9142\") " pod="openshift-marketplace/redhat-operators-hgxqj" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.104037 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7qbl\" (UniqueName: \"kubernetes.io/projected/7fa102e4-1605-4f1d-8d24-ee06801b9142-kube-api-access-m7qbl\") pod \"redhat-operators-hgxqj\" (UID: \"7fa102e4-1605-4f1d-8d24-ee06801b9142\") " pod="openshift-marketplace/redhat-operators-hgxqj" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.104123 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fa102e4-1605-4f1d-8d24-ee06801b9142-catalog-content\") pod \"redhat-operators-hgxqj\" (UID: \"7fa102e4-1605-4f1d-8d24-ee06801b9142\") " pod="openshift-marketplace/redhat-operators-hgxqj" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.104596 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fa102e4-1605-4f1d-8d24-ee06801b9142-utilities\") 
pod \"redhat-operators-hgxqj\" (UID: \"7fa102e4-1605-4f1d-8d24-ee06801b9142\") " pod="openshift-marketplace/redhat-operators-hgxqj" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.104604 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fa102e4-1605-4f1d-8d24-ee06801b9142-catalog-content\") pod \"redhat-operators-hgxqj\" (UID: \"7fa102e4-1605-4f1d-8d24-ee06801b9142\") " pod="openshift-marketplace/redhat-operators-hgxqj" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.123345 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7qbl\" (UniqueName: \"kubernetes.io/projected/7fa102e4-1605-4f1d-8d24-ee06801b9142-kube-api-access-m7qbl\") pod \"redhat-operators-hgxqj\" (UID: \"7fa102e4-1605-4f1d-8d24-ee06801b9142\") " pod="openshift-marketplace/redhat-operators-hgxqj" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.149615 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_55fda102-0ab8-430c-b4b9-2ca87772c44d/account-replicator/0.log" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.169997 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_55fda102-0ab8-430c-b4b9-2ca87772c44d/container-auditor/0.log" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.208573 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hgxqj" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.215260 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_55fda102-0ab8-430c-b4b9-2ca87772c44d/container-replicator/0.log" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.315226 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_55fda102-0ab8-430c-b4b9-2ca87772c44d/container-server/0.log" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.442716 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_55fda102-0ab8-430c-b4b9-2ca87772c44d/object-auditor/0.log" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.453363 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_55fda102-0ab8-430c-b4b9-2ca87772c44d/container-updater/0.log" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.479975 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_55fda102-0ab8-430c-b4b9-2ca87772c44d/object-expirer/0.log" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.575505 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_55fda102-0ab8-430c-b4b9-2ca87772c44d/object-replicator/0.log" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.637528 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hgxqj"] Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.664828 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_55fda102-0ab8-430c-b4b9-2ca87772c44d/object-server/0.log" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.672882 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_55fda102-0ab8-430c-b4b9-2ca87772c44d/rsync/0.log" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.682111 
4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_55fda102-0ab8-430c-b4b9-2ca87772c44d/object-updater/0.log" Oct 01 06:46:34 crc kubenswrapper[4747]: I1001 06:46:34.757843 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/glance-kuttl-tests_swift-storage-0_55fda102-0ab8-430c-b4b9-2ca87772c44d/swift-recon-cron/0.log" Oct 01 06:46:35 crc kubenswrapper[4747]: I1001 06:46:35.210386 4747 generic.go:334] "Generic (PLEG): container finished" podID="7fa102e4-1605-4f1d-8d24-ee06801b9142" containerID="6045129ae7e41560faf5a8707e36c1821384d1e47724ba32847608205a635b63" exitCode=0 Oct 01 06:46:35 crc kubenswrapper[4747]: I1001 06:46:35.210495 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hgxqj" event={"ID":"7fa102e4-1605-4f1d-8d24-ee06801b9142","Type":"ContainerDied","Data":"6045129ae7e41560faf5a8707e36c1821384d1e47724ba32847608205a635b63"} Oct 01 06:46:35 crc kubenswrapper[4747]: I1001 06:46:35.210923 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hgxqj" event={"ID":"7fa102e4-1605-4f1d-8d24-ee06801b9142","Type":"ContainerStarted","Data":"787e9087dfce0bca435d2d8538176932ddd9b944072d65b903818fac097932f4"} Oct 01 06:46:35 crc kubenswrapper[4747]: I1001 06:46:35.212771 4747 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 01 06:46:36 crc kubenswrapper[4747]: I1001 06:46:36.219377 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hgxqj" event={"ID":"7fa102e4-1605-4f1d-8d24-ee06801b9142","Type":"ContainerStarted","Data":"aca9fc1eec17873e5add6d1d9e8d3d3452ae125a2bad930d937adaf34bb8c3ad"} Oct 01 06:46:36 crc kubenswrapper[4747]: I1001 06:46:36.490966 4747 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-cxxwf"] Oct 01 06:46:36 crc kubenswrapper[4747]: I1001 06:46:36.492621 4747 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cxxwf" Oct 01 06:46:36 crc kubenswrapper[4747]: I1001 06:46:36.509474 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cxxwf"] Oct 01 06:46:36 crc kubenswrapper[4747]: I1001 06:46:36.648601 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dffv8\" (UniqueName: \"kubernetes.io/projected/7199d0fa-b334-4533-a3a4-fe1e4e405438-kube-api-access-dffv8\") pod \"certified-operators-cxxwf\" (UID: \"7199d0fa-b334-4533-a3a4-fe1e4e405438\") " pod="openshift-marketplace/certified-operators-cxxwf" Oct 01 06:46:36 crc kubenswrapper[4747]: I1001 06:46:36.648660 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7199d0fa-b334-4533-a3a4-fe1e4e405438-utilities\") pod \"certified-operators-cxxwf\" (UID: \"7199d0fa-b334-4533-a3a4-fe1e4e405438\") " pod="openshift-marketplace/certified-operators-cxxwf" Oct 01 06:46:36 crc kubenswrapper[4747]: I1001 06:46:36.648784 4747 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7199d0fa-b334-4533-a3a4-fe1e4e405438-catalog-content\") pod \"certified-operators-cxxwf\" (UID: \"7199d0fa-b334-4533-a3a4-fe1e4e405438\") " pod="openshift-marketplace/certified-operators-cxxwf" Oct 01 06:46:36 crc kubenswrapper[4747]: I1001 06:46:36.750491 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7199d0fa-b334-4533-a3a4-fe1e4e405438-catalog-content\") pod \"certified-operators-cxxwf\" (UID: \"7199d0fa-b334-4533-a3a4-fe1e4e405438\") " pod="openshift-marketplace/certified-operators-cxxwf" Oct 01 06:46:36 crc kubenswrapper[4747]: I1001 06:46:36.750535 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dffv8\" (UniqueName: \"kubernetes.io/projected/7199d0fa-b334-4533-a3a4-fe1e4e405438-kube-api-access-dffv8\") pod \"certified-operators-cxxwf\" (UID: \"7199d0fa-b334-4533-a3a4-fe1e4e405438\") " pod="openshift-marketplace/certified-operators-cxxwf" Oct 01 06:46:36 crc kubenswrapper[4747]: I1001 06:46:36.750573 4747 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7199d0fa-b334-4533-a3a4-fe1e4e405438-utilities\") pod \"certified-operators-cxxwf\" (UID: \"7199d0fa-b334-4533-a3a4-fe1e4e405438\") " pod="openshift-marketplace/certified-operators-cxxwf" Oct 01 06:46:36 crc kubenswrapper[4747]: I1001 06:46:36.751086 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7199d0fa-b334-4533-a3a4-fe1e4e405438-utilities\") pod \"certified-operators-cxxwf\" (UID: \"7199d0fa-b334-4533-a3a4-fe1e4e405438\") " pod="openshift-marketplace/certified-operators-cxxwf" Oct 01 06:46:36 crc kubenswrapper[4747]: I1001 06:46:36.751188 4747 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7199d0fa-b334-4533-a3a4-fe1e4e405438-catalog-content\") pod \"certified-operators-cxxwf\" (UID: \"7199d0fa-b334-4533-a3a4-fe1e4e405438\") " pod="openshift-marketplace/certified-operators-cxxwf" Oct 01 06:46:36 crc kubenswrapper[4747]: I1001 06:46:36.779245 4747 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-dffv8\" (UniqueName: \"kubernetes.io/projected/7199d0fa-b334-4533-a3a4-fe1e4e405438-kube-api-access-dffv8\") pod \"certified-operators-cxxwf\" (UID: \"7199d0fa-b334-4533-a3a4-fe1e4e405438\") " pod="openshift-marketplace/certified-operators-cxxwf" Oct 01 06:46:36 crc kubenswrapper[4747]: I1001 06:46:36.824113 4747 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-cxxwf" Oct 01 06:46:37 crc kubenswrapper[4747]: I1001 06:46:37.228589 4747 generic.go:334] "Generic (PLEG): container finished" podID="7fa102e4-1605-4f1d-8d24-ee06801b9142" containerID="aca9fc1eec17873e5add6d1d9e8d3d3452ae125a2bad930d937adaf34bb8c3ad" exitCode=0 Oct 01 06:46:37 crc kubenswrapper[4747]: I1001 06:46:37.228686 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hgxqj" event={"ID":"7fa102e4-1605-4f1d-8d24-ee06801b9142","Type":"ContainerDied","Data":"aca9fc1eec17873e5add6d1d9e8d3d3452ae125a2bad930d937adaf34bb8c3ad"} Oct 01 06:46:37 crc kubenswrapper[4747]: I1001 06:46:37.307368 4747 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-cxxwf"] Oct 01 06:46:37 crc kubenswrapper[4747]: W1001 06:46:37.311465 4747 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7199d0fa_b334_4533_a3a4_fe1e4e405438.slice/crio-9de8e1eb98f79432e1bea7e163c3766310389c56171e4011224f73caefc353c0 WatchSource:0}: Error finding container 9de8e1eb98f79432e1bea7e163c3766310389c56171e4011224f73caefc353c0: Status 404 returned error can't find the container with id 9de8e1eb98f79432e1bea7e163c3766310389c56171e4011224f73caefc353c0 Oct 01 06:46:38 crc kubenswrapper[4747]: I1001 06:46:38.239535 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hgxqj" event={"ID":"7fa102e4-1605-4f1d-8d24-ee06801b9142","Type":"ContainerStarted","Data":"b047f8b0c93cc4af5b46289fc1522c3ec9a364cc53fdb053878c2fac9de6ec01"} Oct 01 06:46:38 crc kubenswrapper[4747]: I1001 06:46:38.241393 4747 generic.go:334] "Generic (PLEG): container finished" podID="7199d0fa-b334-4533-a3a4-fe1e4e405438" containerID="bd2f24ed1a02a07a94a64b151ece795432e0bda0004e94036466bc19dd3fb40f" exitCode=0 Oct 01 06:46:38 crc kubenswrapper[4747]: I1001 06:46:38.241437 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxxwf" event={"ID":"7199d0fa-b334-4533-a3a4-fe1e4e405438","Type":"ContainerDied","Data":"bd2f24ed1a02a07a94a64b151ece795432e0bda0004e94036466bc19dd3fb40f"} Oct 01 06:46:38 crc kubenswrapper[4747]: I1001 06:46:38.241461 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxxwf" event={"ID":"7199d0fa-b334-4533-a3a4-fe1e4e405438","Type":"ContainerStarted","Data":"9de8e1eb98f79432e1bea7e163c3766310389c56171e4011224f73caefc353c0"} Oct 01 06:46:38 crc kubenswrapper[4747]: I1001 06:46:38.299565 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hgxqj" podStartSLOduration=2.854116384 podStartE2EDuration="5.299549939s" podCreationTimestamp="2025-10-01 06:46:33 +0000 UTC" firstStartedPulling="2025-10-01 06:46:35.212528395 +0000 UTC m=+1796.622185444" lastFinishedPulling="2025-10-01 06:46:37.65796193 +0000 UTC m=+1799.067618999" observedRunningTime="2025-10-01 06:46:38.278167007 +0000 UTC m=+1799.687824116" 
watchObservedRunningTime="2025-10-01 06:46:38.299549939 +0000 UTC m=+1799.709206978" Oct 01 06:46:39 crc kubenswrapper[4747]: I1001 06:46:39.039788 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-db-create-8gmd8"] Oct 01 06:46:39 crc kubenswrapper[4747]: I1001 06:46:39.049309 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-db-create-8gmd8"] Oct 01 06:46:39 crc kubenswrapper[4747]: I1001 06:46:39.249788 4747 generic.go:334] "Generic (PLEG): container finished" podID="7199d0fa-b334-4533-a3a4-fe1e4e405438" containerID="1ba8eb7cff39f2fbab7ae8ef5f95a4a8222dcdc7c03c5d8061582ccad1c3d503" exitCode=0 Oct 01 06:46:39 crc kubenswrapper[4747]: I1001 06:46:39.249827 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxxwf" event={"ID":"7199d0fa-b334-4533-a3a4-fe1e4e405438","Type":"ContainerDied","Data":"1ba8eb7cff39f2fbab7ae8ef5f95a4a8222dcdc7c03c5d8061582ccad1c3d503"} Oct 01 06:46:39 crc kubenswrapper[4747]: I1001 06:46:39.287798 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae14f798-2ba0-4b69-9e18-e55a48b38b71" path="/var/lib/kubelet/pods/ae14f798-2ba0-4b69-9e18-e55a48b38b71/volumes" Oct 01 06:46:40 crc kubenswrapper[4747]: I1001 06:46:40.265189 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxxwf" event={"ID":"7199d0fa-b334-4533-a3a4-fe1e4e405438","Type":"ContainerStarted","Data":"65512adc0a9e3f75f43acd1607c1dc0608467ff019b3797c988cbc2b4886e917"} Oct 01 06:46:40 crc kubenswrapper[4747]: I1001 06:46:40.970280 4747 scope.go:117] "RemoveContainer" containerID="1fee38e3b98cf70adbceeec25727f3aee1bca6137f92016cb825de41e4a66c16" Oct 01 06:46:44 crc kubenswrapper[4747]: I1001 06:46:44.209154 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hgxqj" Oct 01 06:46:44 crc kubenswrapper[4747]: I1001 06:46:44.210019 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hgxqj" Oct 01 06:46:44 crc kubenswrapper[4747]: I1001 06:46:44.296222 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hgxqj" Oct 01 06:46:44 crc kubenswrapper[4747]: I1001 06:46:44.324963 4747 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-cxxwf" podStartSLOduration=6.921454568 podStartE2EDuration="8.324908314s" podCreationTimestamp="2025-10-01 06:46:36 +0000 UTC" firstStartedPulling="2025-10-01 06:46:38.242994718 +0000 UTC m=+1799.652651767" lastFinishedPulling="2025-10-01 06:46:39.646448424 +0000 UTC m=+1801.056105513" observedRunningTime="2025-10-01 06:46:40.291010626 +0000 UTC m=+1801.700667675" watchObservedRunningTime="2025-10-01 06:46:44.324908314 +0000 UTC m=+1805.734565393" Oct 01 06:46:44 crc kubenswrapper[4747]: I1001 06:46:44.376401 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hgxqj" Oct 01 06:46:44 crc kubenswrapper[4747]: I1001 06:46:44.542683 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hgxqj"] Oct 01 06:46:46 crc kubenswrapper[4747]: I1001 06:46:46.277416 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:46:46 crc kubenswrapper[4747]: E1001 06:46:46.277952 4747 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:46:46 crc kubenswrapper[4747]: I1001 06:46:46.321319 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hgxqj" podUID="7fa102e4-1605-4f1d-8d24-ee06801b9142" containerName="registry-server" containerID="cri-o://b047f8b0c93cc4af5b46289fc1522c3ec9a364cc53fdb053878c2fac9de6ec01" gracePeriod=2 Oct 01 06:46:46 crc kubenswrapper[4747]: I1001 06:46:46.755997 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hgxqj" Oct 01 06:46:46 crc kubenswrapper[4747]: I1001 06:46:46.824880 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-cxxwf" Oct 01 06:46:46 crc kubenswrapper[4747]: I1001 06:46:46.824930 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-cxxwf" Oct 01 06:46:46 crc kubenswrapper[4747]: I1001 06:46:46.865354 4747 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-cxxwf" Oct 01 06:46:46 crc kubenswrapper[4747]: I1001 06:46:46.900944 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fa102e4-1605-4f1d-8d24-ee06801b9142-utilities\") pod \"7fa102e4-1605-4f1d-8d24-ee06801b9142\" (UID: \"7fa102e4-1605-4f1d-8d24-ee06801b9142\") " Oct 01 06:46:46 crc kubenswrapper[4747]: I1001 06:46:46.901067 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m7qbl\" (UniqueName: \"kubernetes.io/projected/7fa102e4-1605-4f1d-8d24-ee06801b9142-kube-api-access-m7qbl\") pod \"7fa102e4-1605-4f1d-8d24-ee06801b9142\" (UID: \"7fa102e4-1605-4f1d-8d24-ee06801b9142\") " Oct 01 06:46:46 crc kubenswrapper[4747]: I1001 06:46:46.901179 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fa102e4-1605-4f1d-8d24-ee06801b9142-catalog-content\") pod \"7fa102e4-1605-4f1d-8d24-ee06801b9142\" (UID: \"7fa102e4-1605-4f1d-8d24-ee06801b9142\") " Oct 01 06:46:46 crc kubenswrapper[4747]: I1001 06:46:46.902183 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7fa102e4-1605-4f1d-8d24-ee06801b9142-utilities" (OuterVolumeSpecName: "utilities") pod "7fa102e4-1605-4f1d-8d24-ee06801b9142" (UID: "7fa102e4-1605-4f1d-8d24-ee06801b9142"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:46:46 crc kubenswrapper[4747]: I1001 06:46:46.940882 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fa102e4-1605-4f1d-8d24-ee06801b9142-kube-api-access-m7qbl" (OuterVolumeSpecName: "kube-api-access-m7qbl") pod "7fa102e4-1605-4f1d-8d24-ee06801b9142" (UID: "7fa102e4-1605-4f1d-8d24-ee06801b9142"). InnerVolumeSpecName "kube-api-access-m7qbl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:46:47 crc kubenswrapper[4747]: I1001 06:46:47.008108 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7fa102e4-1605-4f1d-8d24-ee06801b9142-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:46:47 crc kubenswrapper[4747]: I1001 06:46:47.008140 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m7qbl\" (UniqueName: \"kubernetes.io/projected/7fa102e4-1605-4f1d-8d24-ee06801b9142-kube-api-access-m7qbl\") on node \"crc\" DevicePath \"\"" Oct 01 06:46:47 crc kubenswrapper[4747]: I1001 06:46:47.331162 4747 generic.go:334] "Generic (PLEG): container finished" podID="7fa102e4-1605-4f1d-8d24-ee06801b9142" containerID="b047f8b0c93cc4af5b46289fc1522c3ec9a364cc53fdb053878c2fac9de6ec01" exitCode=0 Oct 01 06:46:47 crc kubenswrapper[4747]: I1001 06:46:47.331219 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hgxqj" event={"ID":"7fa102e4-1605-4f1d-8d24-ee06801b9142","Type":"ContainerDied","Data":"b047f8b0c93cc4af5b46289fc1522c3ec9a364cc53fdb053878c2fac9de6ec01"} Oct 01 06:46:47 crc kubenswrapper[4747]: I1001 06:46:47.331259 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hgxqj" event={"ID":"7fa102e4-1605-4f1d-8d24-ee06801b9142","Type":"ContainerDied","Data":"787e9087dfce0bca435d2d8538176932ddd9b944072d65b903818fac097932f4"} Oct 01 06:46:47 crc kubenswrapper[4747]: I1001 06:46:47.331280 4747 scope.go:117] "RemoveContainer" containerID="b047f8b0c93cc4af5b46289fc1522c3ec9a364cc53fdb053878c2fac9de6ec01" Oct 01 06:46:47 crc kubenswrapper[4747]: I1001 06:46:47.331303 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hgxqj" Oct 01 06:46:47 crc kubenswrapper[4747]: I1001 06:46:47.353959 4747 scope.go:117] "RemoveContainer" containerID="aca9fc1eec17873e5add6d1d9e8d3d3452ae125a2bad930d937adaf34bb8c3ad" Oct 01 06:46:47 crc kubenswrapper[4747]: I1001 06:46:47.375404 4747 scope.go:117] "RemoveContainer" containerID="6045129ae7e41560faf5a8707e36c1821384d1e47724ba32847608205a635b63" Oct 01 06:46:47 crc kubenswrapper[4747]: I1001 06:46:47.399626 4747 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-cxxwf" Oct 01 06:46:47 crc kubenswrapper[4747]: I1001 06:46:47.448152 4747 scope.go:117] "RemoveContainer" containerID="b047f8b0c93cc4af5b46289fc1522c3ec9a364cc53fdb053878c2fac9de6ec01" Oct 01 06:46:47 crc kubenswrapper[4747]: E1001 06:46:47.448728 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b047f8b0c93cc4af5b46289fc1522c3ec9a364cc53fdb053878c2fac9de6ec01\": container with ID starting with b047f8b0c93cc4af5b46289fc1522c3ec9a364cc53fdb053878c2fac9de6ec01 not found: ID does not exist" containerID="b047f8b0c93cc4af5b46289fc1522c3ec9a364cc53fdb053878c2fac9de6ec01" Oct 01 06:46:47 crc kubenswrapper[4747]: I1001 06:46:47.448804 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b047f8b0c93cc4af5b46289fc1522c3ec9a364cc53fdb053878c2fac9de6ec01"} err="failed to get container status \"b047f8b0c93cc4af5b46289fc1522c3ec9a364cc53fdb053878c2fac9de6ec01\": rpc error: code = NotFound desc = could not find container \"b047f8b0c93cc4af5b46289fc1522c3ec9a364cc53fdb053878c2fac9de6ec01\": container with ID starting with b047f8b0c93cc4af5b46289fc1522c3ec9a364cc53fdb053878c2fac9de6ec01 not found: ID does not exist" Oct 01 06:46:47 crc kubenswrapper[4747]: I1001 06:46:47.448835 4747 scope.go:117] "RemoveContainer" containerID="aca9fc1eec17873e5add6d1d9e8d3d3452ae125a2bad930d937adaf34bb8c3ad" Oct 01 06:46:47 crc kubenswrapper[4747]: E1001 06:46:47.449241 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aca9fc1eec17873e5add6d1d9e8d3d3452ae125a2bad930d937adaf34bb8c3ad\": container with ID starting with aca9fc1eec17873e5add6d1d9e8d3d3452ae125a2bad930d937adaf34bb8c3ad not found: ID does not exist" containerID="aca9fc1eec17873e5add6d1d9e8d3d3452ae125a2bad930d937adaf34bb8c3ad" Oct 01 06:46:47 crc kubenswrapper[4747]: I1001 06:46:47.449272 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aca9fc1eec17873e5add6d1d9e8d3d3452ae125a2bad930d937adaf34bb8c3ad"} err="failed to get container status \"aca9fc1eec17873e5add6d1d9e8d3d3452ae125a2bad930d937adaf34bb8c3ad\": rpc error: code = NotFound desc = could not find container \"aca9fc1eec17873e5add6d1d9e8d3d3452ae125a2bad930d937adaf34bb8c3ad\": container with ID starting with aca9fc1eec17873e5add6d1d9e8d3d3452ae125a2bad930d937adaf34bb8c3ad not found: ID does not exist" Oct 01 06:46:47 crc kubenswrapper[4747]: I1001 06:46:47.449294 4747 scope.go:117] "RemoveContainer" containerID="6045129ae7e41560faf5a8707e36c1821384d1e47724ba32847608205a635b63" Oct 01 06:46:47 crc kubenswrapper[4747]: E1001 06:46:47.449698 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6045129ae7e41560faf5a8707e36c1821384d1e47724ba32847608205a635b63\": container with ID 
starting with 6045129ae7e41560faf5a8707e36c1821384d1e47724ba32847608205a635b63 not found: ID does not exist" containerID="6045129ae7e41560faf5a8707e36c1821384d1e47724ba32847608205a635b63" Oct 01 06:46:47 crc kubenswrapper[4747]: I1001 06:46:47.449791 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6045129ae7e41560faf5a8707e36c1821384d1e47724ba32847608205a635b63"} err="failed to get container status \"6045129ae7e41560faf5a8707e36c1821384d1e47724ba32847608205a635b63\": rpc error: code = NotFound desc = could not find container \"6045129ae7e41560faf5a8707e36c1821384d1e47724ba32847608205a635b63\": container with ID starting with 6045129ae7e41560faf5a8707e36c1821384d1e47724ba32847608205a635b63 not found: ID does not exist" Oct 01 06:46:47 crc kubenswrapper[4747]: I1001 06:46:47.623685 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7fa102e4-1605-4f1d-8d24-ee06801b9142-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7fa102e4-1605-4f1d-8d24-ee06801b9142" (UID: "7fa102e4-1605-4f1d-8d24-ee06801b9142"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:46:47 crc kubenswrapper[4747]: I1001 06:46:47.662672 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hgxqj"] Oct 01 06:46:47 crc kubenswrapper[4747]: I1001 06:46:47.667958 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hgxqj"] Oct 01 06:46:47 crc kubenswrapper[4747]: I1001 06:46:47.721008 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7fa102e4-1605-4f1d-8d24-ee06801b9142-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:46:48 crc kubenswrapper[4747]: I1001 06:46:48.140656 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cxxwf"] Oct 01 06:46:49 crc kubenswrapper[4747]: I1001 06:46:49.022005 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-b4f6-account-create-zkdj4"] Oct 01 06:46:49 crc kubenswrapper[4747]: I1001 06:46:49.028516 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-b4f6-account-create-zkdj4"] Oct 01 06:46:49 crc kubenswrapper[4747]: I1001 06:46:49.299438 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fa102e4-1605-4f1d-8d24-ee06801b9142" path="/var/lib/kubelet/pods/7fa102e4-1605-4f1d-8d24-ee06801b9142/volumes" Oct 01 06:46:49 crc kubenswrapper[4747]: I1001 06:46:49.301734 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcde1a2d-d437-45e9-ab7a-37ce46038e1c" path="/var/lib/kubelet/pods/fcde1a2d-d437-45e9-ab7a-37ce46038e1c/volumes" Oct 01 06:46:49 crc kubenswrapper[4747]: I1001 06:46:49.369027 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-cxxwf" podUID="7199d0fa-b334-4533-a3a4-fe1e4e405438" containerName="registry-server" containerID="cri-o://65512adc0a9e3f75f43acd1607c1dc0608467ff019b3797c988cbc2b4886e917" gracePeriod=2 Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.031256 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4bfp9_18fb0a3d-c326-4741-a6af-4897f1740900/extract-utilities/0.log" Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.140359 4747 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4bfp9_18fb0a3d-c326-4741-a6af-4897f1740900/extract-utilities/0.log" Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.238027 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4bfp9_18fb0a3d-c326-4741-a6af-4897f1740900/extract-content/0.log" Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.255904 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4bfp9_18fb0a3d-c326-4741-a6af-4897f1740900/extract-content/0.log" Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.377030 4747 generic.go:334] "Generic (PLEG): container finished" podID="7199d0fa-b334-4533-a3a4-fe1e4e405438" containerID="65512adc0a9e3f75f43acd1607c1dc0608467ff019b3797c988cbc2b4886e917" exitCode=0 Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.377074 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxxwf" event={"ID":"7199d0fa-b334-4533-a3a4-fe1e4e405438","Type":"ContainerDied","Data":"65512adc0a9e3f75f43acd1607c1dc0608467ff019b3797c988cbc2b4886e917"} Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.377101 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-cxxwf" event={"ID":"7199d0fa-b334-4533-a3a4-fe1e4e405438","Type":"ContainerDied","Data":"9de8e1eb98f79432e1bea7e163c3766310389c56171e4011224f73caefc353c0"} Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.377113 4747 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9de8e1eb98f79432e1bea7e163c3766310389c56171e4011224f73caefc353c0" Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.425200 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cxxwf" Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.513081 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4bfp9_18fb0a3d-c326-4741-a6af-4897f1740900/extract-content/0.log" Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.516773 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4bfp9_18fb0a3d-c326-4741-a6af-4897f1740900/extract-utilities/0.log" Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.565332 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dffv8\" (UniqueName: \"kubernetes.io/projected/7199d0fa-b334-4533-a3a4-fe1e4e405438-kube-api-access-dffv8\") pod \"7199d0fa-b334-4533-a3a4-fe1e4e405438\" (UID: \"7199d0fa-b334-4533-a3a4-fe1e4e405438\") " Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.565670 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7199d0fa-b334-4533-a3a4-fe1e4e405438-catalog-content\") pod \"7199d0fa-b334-4533-a3a4-fe1e4e405438\" (UID: \"7199d0fa-b334-4533-a3a4-fe1e4e405438\") " Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.565892 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7199d0fa-b334-4533-a3a4-fe1e4e405438-utilities\") pod \"7199d0fa-b334-4533-a3a4-fe1e4e405438\" (UID: \"7199d0fa-b334-4533-a3a4-fe1e4e405438\") " Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.566931 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7199d0fa-b334-4533-a3a4-fe1e4e405438-utilities" (OuterVolumeSpecName: "utilities") pod "7199d0fa-b334-4533-a3a4-fe1e4e405438" (UID: "7199d0fa-b334-4533-a3a4-fe1e4e405438"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.572526 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7199d0fa-b334-4533-a3a4-fe1e4e405438-kube-api-access-dffv8" (OuterVolumeSpecName: "kube-api-access-dffv8") pod "7199d0fa-b334-4533-a3a4-fe1e4e405438" (UID: "7199d0fa-b334-4533-a3a4-fe1e4e405438"). InnerVolumeSpecName "kube-api-access-dffv8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.612676 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7199d0fa-b334-4533-a3a4-fe1e4e405438-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7199d0fa-b334-4533-a3a4-fe1e4e405438" (UID: "7199d0fa-b334-4533-a3a4-fe1e4e405438"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.667050 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dffv8\" (UniqueName: \"kubernetes.io/projected/7199d0fa-b334-4533-a3a4-fe1e4e405438-kube-api-access-dffv8\") on node \"crc\" DevicePath \"\"" Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.667265 4747 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7199d0fa-b334-4533-a3a4-fe1e4e405438-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.667372 4747 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7199d0fa-b334-4533-a3a4-fe1e4e405438-utilities\") on node \"crc\" DevicePath \"\"" Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.716240 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cxxwf_7199d0fa-b334-4533-a3a4-fe1e4e405438/extract-utilities/0.log" Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.855470 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-4bfp9_18fb0a3d-c326-4741-a6af-4897f1740900/registry-server/0.log" Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.877286 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cxxwf_7199d0fa-b334-4533-a3a4-fe1e4e405438/extract-content/0.log" Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.902770 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cxxwf_7199d0fa-b334-4533-a3a4-fe1e4e405438/extract-utilities/0.log" Oct 01 06:46:50 crc kubenswrapper[4747]: I1001 06:46:50.915162 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cxxwf_7199d0fa-b334-4533-a3a4-fe1e4e405438/extract-content/0.log" Oct 01 06:46:51 crc kubenswrapper[4747]: I1001 06:46:51.099122 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cxxwf_7199d0fa-b334-4533-a3a4-fe1e4e405438/registry-server/0.log" Oct 01 06:46:51 crc kubenswrapper[4747]: I1001 06:46:51.104100 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cxxwf_7199d0fa-b334-4533-a3a4-fe1e4e405438/extract-utilities/0.log" Oct 01 06:46:51 crc kubenswrapper[4747]: I1001 06:46:51.125581 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-cxxwf_7199d0fa-b334-4533-a3a4-fe1e4e405438/extract-content/0.log" Oct 01 06:46:51 crc kubenswrapper[4747]: I1001 06:46:51.303941 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9xksc_6e2e0a71-6f8f-49e7-b077-2dab23e591f2/extract-utilities/0.log" Oct 01 06:46:51 crc kubenswrapper[4747]: I1001 06:46:51.384600 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-cxxwf" Oct 01 06:46:51 crc kubenswrapper[4747]: I1001 06:46:51.402312 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-cxxwf"] Oct 01 06:46:51 crc kubenswrapper[4747]: I1001 06:46:51.406925 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-cxxwf"] Oct 01 06:46:51 crc kubenswrapper[4747]: I1001 06:46:51.487655 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9xksc_6e2e0a71-6f8f-49e7-b077-2dab23e591f2/extract-content/0.log" Oct 01 06:46:51 crc kubenswrapper[4747]: I1001 06:46:51.494790 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9xksc_6e2e0a71-6f8f-49e7-b077-2dab23e591f2/extract-content/0.log" Oct 01 06:46:51 crc kubenswrapper[4747]: I1001 06:46:51.505325 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9xksc_6e2e0a71-6f8f-49e7-b077-2dab23e591f2/extract-utilities/0.log" Oct 01 06:46:51 crc kubenswrapper[4747]: I1001 06:46:51.637051 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9xksc_6e2e0a71-6f8f-49e7-b077-2dab23e591f2/extract-utilities/0.log" Oct 01 06:46:51 crc kubenswrapper[4747]: I1001 06:46:51.660906 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9xksc_6e2e0a71-6f8f-49e7-b077-2dab23e591f2/extract-content/0.log" Oct 01 06:46:51 crc kubenswrapper[4747]: I1001 06:46:51.860866 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw_68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1/util/0.log" Oct 01 06:46:52 crc kubenswrapper[4747]: I1001 06:46:52.068105 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9xksc_6e2e0a71-6f8f-49e7-b077-2dab23e591f2/registry-server/0.log" Oct 01 06:46:52 crc kubenswrapper[4747]: I1001 06:46:52.095338 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw_68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1/pull/0.log" Oct 01 06:46:52 crc kubenswrapper[4747]: I1001 06:46:52.117256 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw_68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1/pull/0.log" Oct 01 06:46:52 crc kubenswrapper[4747]: I1001 06:46:52.121862 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw_68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1/util/0.log" Oct 01 06:46:52 crc kubenswrapper[4747]: I1001 06:46:52.280957 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw_68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1/extract/0.log" Oct 01 06:46:52 crc kubenswrapper[4747]: I1001 06:46:52.310238 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw_68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1/pull/0.log" Oct 01 06:46:52 crc kubenswrapper[4747]: I1001 06:46:52.349465 4747 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_f29efc416ca216184f30dbb4b19e0f463bdcecc8ef634322abbad88d96nxxpw_68aeb6d7-e9ed-45a7-a282-d41b9b3dfdc1/util/0.log" Oct 01 06:46:52 crc kubenswrapper[4747]: I1001 06:46:52.513451 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-mcc6b_f7a3b74b-4658-470c-bea5-0a7431da169e/marketplace-operator/0.log" Oct 01 06:46:52 crc kubenswrapper[4747]: I1001 06:46:52.541222 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zh5bf_b657e3a5-1d77-412c-999e-1f386d9724bf/extract-utilities/0.log" Oct 01 06:46:52 crc kubenswrapper[4747]: I1001 06:46:52.659782 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zh5bf_b657e3a5-1d77-412c-999e-1f386d9724bf/extract-utilities/0.log" Oct 01 06:46:52 crc kubenswrapper[4747]: I1001 06:46:52.696639 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zh5bf_b657e3a5-1d77-412c-999e-1f386d9724bf/extract-content/0.log" Oct 01 06:46:52 crc kubenswrapper[4747]: I1001 06:46:52.708233 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zh5bf_b657e3a5-1d77-412c-999e-1f386d9724bf/extract-content/0.log" Oct 01 06:46:52 crc kubenswrapper[4747]: I1001 06:46:52.829234 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zh5bf_b657e3a5-1d77-412c-999e-1f386d9724bf/extract-utilities/0.log" Oct 01 06:46:52 crc kubenswrapper[4747]: I1001 06:46:52.941776 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-27npp_2b622aa3-38b6-46f4-b2bc-91208b5ecf20/extract-utilities/0.log" Oct 01 06:46:52 crc kubenswrapper[4747]: I1001 06:46:52.953618 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zh5bf_b657e3a5-1d77-412c-999e-1f386d9724bf/extract-content/0.log" Oct 01 06:46:52 crc kubenswrapper[4747]: I1001 06:46:52.958616 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zh5bf_b657e3a5-1d77-412c-999e-1f386d9724bf/registry-server/0.log" Oct 01 06:46:53 crc kubenswrapper[4747]: I1001 06:46:53.097163 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-27npp_2b622aa3-38b6-46f4-b2bc-91208b5ecf20/extract-content/0.log" Oct 01 06:46:53 crc kubenswrapper[4747]: I1001 06:46:53.104976 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-27npp_2b622aa3-38b6-46f4-b2bc-91208b5ecf20/extract-content/0.log" Oct 01 06:46:53 crc kubenswrapper[4747]: I1001 06:46:53.109467 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-27npp_2b622aa3-38b6-46f4-b2bc-91208b5ecf20/extract-utilities/0.log" Oct 01 06:46:53 crc kubenswrapper[4747]: I1001 06:46:53.274224 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-27npp_2b622aa3-38b6-46f4-b2bc-91208b5ecf20/extract-utilities/0.log" Oct 01 06:46:53 crc kubenswrapper[4747]: I1001 06:46:53.284995 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7199d0fa-b334-4533-a3a4-fe1e4e405438" path="/var/lib/kubelet/pods/7199d0fa-b334-4533-a3a4-fe1e4e405438/volumes" Oct 01 06:46:53 crc kubenswrapper[4747]: I1001 06:46:53.290320 4747 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-27npp_2b622aa3-38b6-46f4-b2bc-91208b5ecf20/extract-content/0.log" Oct 01 06:46:53 crc kubenswrapper[4747]: I1001 06:46:53.662237 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-27npp_2b622aa3-38b6-46f4-b2bc-91208b5ecf20/registry-server/0.log" Oct 01 06:46:58 crc kubenswrapper[4747]: I1001 06:46:58.051718 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["glance-kuttl-tests/glance-db-sync-rrxt7"] Oct 01 06:46:58 crc kubenswrapper[4747]: I1001 06:46:58.065501 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["glance-kuttl-tests/glance-db-sync-rrxt7"] Oct 01 06:46:58 crc kubenswrapper[4747]: I1001 06:46:58.277030 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:46:58 crc kubenswrapper[4747]: E1001 06:46:58.277332 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:46:59 crc kubenswrapper[4747]: I1001 06:46:59.301820 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c39714a-ba3e-4c86-88d6-0a60bc88d227" path="/var/lib/kubelet/pods/2c39714a-ba3e-4c86-88d6-0a60bc88d227/volumes" Oct 01 06:47:09 crc kubenswrapper[4747]: I1001 06:47:09.280845 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:47:09 crc kubenswrapper[4747]: E1001 06:47:09.281442 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:47:20 crc kubenswrapper[4747]: I1001 06:47:20.277303 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:47:20 crc kubenswrapper[4747]: E1001 06:47:20.278383 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:47:32 crc kubenswrapper[4747]: I1001 06:47:32.277310 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:47:32 crc kubenswrapper[4747]: E1001 06:47:32.278309 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:47:41 crc kubenswrapper[4747]: I1001 06:47:41.022498 4747 scope.go:117] "RemoveContainer" containerID="bb09985c27f593139b7d41ef2d5e785a872ef540cfcd8dc56be5420857a42b2d" Oct 01 06:47:41 crc kubenswrapper[4747]: I1001 06:47:41.070432 4747 scope.go:117] "RemoveContainer" containerID="749590e3d2182e0bab6ceeee832b22c2f68266702e759161b1a68972144972f4" Oct 01 06:47:45 crc kubenswrapper[4747]: I1001 06:47:45.277554 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:47:45 crc kubenswrapper[4747]: E1001 06:47:45.278561 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:47:53 crc kubenswrapper[4747]: I1001 06:47:53.028729 4747 generic.go:334] "Generic (PLEG): container finished" podID="1370403d-e53d-48fa-861f-0957fefb7adf" containerID="760421b16f59a9008fd14a296d399c8e8ca870e45cb2fac3d438f47059650e33" exitCode=0 Oct 01 06:47:53 crc kubenswrapper[4747]: I1001 06:47:53.028848 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bjw8g/must-gather-n94lr" event={"ID":"1370403d-e53d-48fa-861f-0957fefb7adf","Type":"ContainerDied","Data":"760421b16f59a9008fd14a296d399c8e8ca870e45cb2fac3d438f47059650e33"} Oct 01 06:47:53 crc kubenswrapper[4747]: I1001 06:47:53.030625 4747 scope.go:117] "RemoveContainer" containerID="760421b16f59a9008fd14a296d399c8e8ca870e45cb2fac3d438f47059650e33" Oct 01 06:47:54 crc kubenswrapper[4747]: I1001 06:47:54.010051 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-bjw8g_must-gather-n94lr_1370403d-e53d-48fa-861f-0957fefb7adf/gather/0.log" Oct 01 06:47:57 crc kubenswrapper[4747]: I1001 06:47:57.277476 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:47:57 crc kubenswrapper[4747]: E1001 06:47:57.277791 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:48:01 crc kubenswrapper[4747]: I1001 06:48:01.156693 4747 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-bjw8g/must-gather-n94lr"] Oct 01 06:48:01 crc kubenswrapper[4747]: I1001 06:48:01.157370 4747 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-bjw8g/must-gather-n94lr" podUID="1370403d-e53d-48fa-861f-0957fefb7adf" containerName="copy" containerID="cri-o://4568053a18a7d94b3f5319c41c1b1cecdf54ed6597f041db6339f8443027b98e" gracePeriod=2 Oct 01 06:48:01 crc kubenswrapper[4747]: I1001 06:48:01.163619 4747 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-bjw8g/must-gather-n94lr"] Oct 01 06:48:01 crc kubenswrapper[4747]: I1001 
06:48:01.579147 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-bjw8g_must-gather-n94lr_1370403d-e53d-48fa-861f-0957fefb7adf/copy/0.log" Oct 01 06:48:01 crc kubenswrapper[4747]: I1001 06:48:01.579909 4747 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bjw8g/must-gather-n94lr" Oct 01 06:48:01 crc kubenswrapper[4747]: I1001 06:48:01.657095 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jv2f7\" (UniqueName: \"kubernetes.io/projected/1370403d-e53d-48fa-861f-0957fefb7adf-kube-api-access-jv2f7\") pod \"1370403d-e53d-48fa-861f-0957fefb7adf\" (UID: \"1370403d-e53d-48fa-861f-0957fefb7adf\") " Oct 01 06:48:01 crc kubenswrapper[4747]: I1001 06:48:01.657184 4747 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1370403d-e53d-48fa-861f-0957fefb7adf-must-gather-output\") pod \"1370403d-e53d-48fa-861f-0957fefb7adf\" (UID: \"1370403d-e53d-48fa-861f-0957fefb7adf\") " Oct 01 06:48:01 crc kubenswrapper[4747]: I1001 06:48:01.677634 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1370403d-e53d-48fa-861f-0957fefb7adf-kube-api-access-jv2f7" (OuterVolumeSpecName: "kube-api-access-jv2f7") pod "1370403d-e53d-48fa-861f-0957fefb7adf" (UID: "1370403d-e53d-48fa-861f-0957fefb7adf"). InnerVolumeSpecName "kube-api-access-jv2f7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 01 06:48:01 crc kubenswrapper[4747]: I1001 06:48:01.746517 4747 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1370403d-e53d-48fa-861f-0957fefb7adf-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "1370403d-e53d-48fa-861f-0957fefb7adf" (UID: "1370403d-e53d-48fa-861f-0957fefb7adf"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 01 06:48:01 crc kubenswrapper[4747]: I1001 06:48:01.758606 4747 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jv2f7\" (UniqueName: \"kubernetes.io/projected/1370403d-e53d-48fa-861f-0957fefb7adf-kube-api-access-jv2f7\") on node \"crc\" DevicePath \"\"" Oct 01 06:48:01 crc kubenswrapper[4747]: I1001 06:48:01.758643 4747 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/1370403d-e53d-48fa-861f-0957fefb7adf-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 01 06:48:02 crc kubenswrapper[4747]: I1001 06:48:02.121144 4747 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-bjw8g_must-gather-n94lr_1370403d-e53d-48fa-861f-0957fefb7adf/copy/0.log" Oct 01 06:48:02 crc kubenswrapper[4747]: I1001 06:48:02.121577 4747 generic.go:334] "Generic (PLEG): container finished" podID="1370403d-e53d-48fa-861f-0957fefb7adf" containerID="4568053a18a7d94b3f5319c41c1b1cecdf54ed6597f041db6339f8443027b98e" exitCode=143 Oct 01 06:48:02 crc kubenswrapper[4747]: I1001 06:48:02.121651 4747 scope.go:117] "RemoveContainer" containerID="4568053a18a7d94b3f5319c41c1b1cecdf54ed6597f041db6339f8443027b98e" Oct 01 06:48:02 crc kubenswrapper[4747]: I1001 06:48:02.121660 4747 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-bjw8g/must-gather-n94lr" Oct 01 06:48:02 crc kubenswrapper[4747]: I1001 06:48:02.140044 4747 scope.go:117] "RemoveContainer" containerID="760421b16f59a9008fd14a296d399c8e8ca870e45cb2fac3d438f47059650e33" Oct 01 06:48:02 crc kubenswrapper[4747]: I1001 06:48:02.181941 4747 scope.go:117] "RemoveContainer" containerID="4568053a18a7d94b3f5319c41c1b1cecdf54ed6597f041db6339f8443027b98e" Oct 01 06:48:02 crc kubenswrapper[4747]: E1001 06:48:02.182516 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4568053a18a7d94b3f5319c41c1b1cecdf54ed6597f041db6339f8443027b98e\": container with ID starting with 4568053a18a7d94b3f5319c41c1b1cecdf54ed6597f041db6339f8443027b98e not found: ID does not exist" containerID="4568053a18a7d94b3f5319c41c1b1cecdf54ed6597f041db6339f8443027b98e" Oct 01 06:48:02 crc kubenswrapper[4747]: I1001 06:48:02.182565 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4568053a18a7d94b3f5319c41c1b1cecdf54ed6597f041db6339f8443027b98e"} err="failed to get container status \"4568053a18a7d94b3f5319c41c1b1cecdf54ed6597f041db6339f8443027b98e\": rpc error: code = NotFound desc = could not find container \"4568053a18a7d94b3f5319c41c1b1cecdf54ed6597f041db6339f8443027b98e\": container with ID starting with 4568053a18a7d94b3f5319c41c1b1cecdf54ed6597f041db6339f8443027b98e not found: ID does not exist" Oct 01 06:48:02 crc kubenswrapper[4747]: I1001 06:48:02.182601 4747 scope.go:117] "RemoveContainer" containerID="760421b16f59a9008fd14a296d399c8e8ca870e45cb2fac3d438f47059650e33" Oct 01 06:48:02 crc kubenswrapper[4747]: E1001 06:48:02.183223 4747 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"760421b16f59a9008fd14a296d399c8e8ca870e45cb2fac3d438f47059650e33\": container with ID starting with 760421b16f59a9008fd14a296d399c8e8ca870e45cb2fac3d438f47059650e33 not found: ID does not exist" containerID="760421b16f59a9008fd14a296d399c8e8ca870e45cb2fac3d438f47059650e33" Oct 01 06:48:02 crc kubenswrapper[4747]: I1001 06:48:02.183280 4747 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"760421b16f59a9008fd14a296d399c8e8ca870e45cb2fac3d438f47059650e33"} err="failed to get container status \"760421b16f59a9008fd14a296d399c8e8ca870e45cb2fac3d438f47059650e33\": rpc error: code = NotFound desc = could not find container \"760421b16f59a9008fd14a296d399c8e8ca870e45cb2fac3d438f47059650e33\": container with ID starting with 760421b16f59a9008fd14a296d399c8e8ca870e45cb2fac3d438f47059650e33 not found: ID does not exist" Oct 01 06:48:03 crc kubenswrapper[4747]: I1001 06:48:03.286980 4747 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1370403d-e53d-48fa-861f-0957fefb7adf" path="/var/lib/kubelet/pods/1370403d-e53d-48fa-861f-0957fefb7adf/volumes" Oct 01 06:48:09 crc kubenswrapper[4747]: I1001 06:48:09.282456 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:48:09 crc kubenswrapper[4747]: E1001 06:48:09.283428 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:48:20 crc kubenswrapper[4747]: I1001 06:48:20.278321 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:48:20 crc kubenswrapper[4747]: E1001 06:48:20.279502 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:48:34 crc kubenswrapper[4747]: I1001 06:48:34.276622 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:48:34 crc kubenswrapper[4747]: E1001 06:48:34.277418 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:48:47 crc kubenswrapper[4747]: I1001 06:48:47.276952 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:48:47 crc kubenswrapper[4747]: E1001 06:48:47.278547 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:49:00 crc kubenswrapper[4747]: I1001 06:49:00.277440 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:49:00 crc kubenswrapper[4747]: E1001 06:49:00.278553 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:49:15 crc kubenswrapper[4747]: I1001 06:49:15.277142 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:49:15 crc kubenswrapper[4747]: E1001 06:49:15.278186 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:49:26 crc kubenswrapper[4747]: I1001 06:49:26.277587 4747 
scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:49:26 crc kubenswrapper[4747]: E1001 06:49:26.278456 4747 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gh9dg_openshift-machine-config-operator(90df9e29-7482-4ab7-84c6-f3029df17a0d)\"" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" podUID="90df9e29-7482-4ab7-84c6-f3029df17a0d" Oct 01 06:49:38 crc kubenswrapper[4747]: I1001 06:49:38.276816 4747 scope.go:117] "RemoveContainer" containerID="8a5ea47d8a46d55a5c92e2ca06384aae85c1410539e07119bcb48236283d6d03" Oct 01 06:49:39 crc kubenswrapper[4747]: I1001 06:49:39.118546 4747 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gh9dg" event={"ID":"90df9e29-7482-4ab7-84c6-f3029df17a0d","Type":"ContainerStarted","Data":"67840c7b288de158c59d5614cff71abbcf1ad90fad597aedab74024cb6af4528"} var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515067147457024465 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015067147457017402 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015067143244016513 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015067143245015464 5ustar corecore